Merge pull request #373 from Oloodi/368-sub-task-provision-and-copy-validation-database-instance

368 sub task provision and copy validation database instance
This commit is contained in:
José Salazar
2026-02-04 22:31:42 -05:00
committed by GitHub
14 changed files with 361 additions and 49 deletions

View File

@@ -95,10 +95,10 @@ class ClientHubsPage extends StatelessWidget {
).add(
ClientHubsIdentifyDialogToggled(hub: hub),
),
onDeletePressed: () =>
BlocProvider.of<ClientHubsBloc>(
context,
).add(ClientHubsDeleteRequested(hub.id)),
onDeletePressed: () => _confirmDeleteHub(
context,
hub,
),
),
),
],
@@ -221,4 +221,51 @@ class ClientHubsPage extends StatelessWidget {
),
);
}
/// Shows a confirmation dialog before deleting [hub].
///
/// The delete event is only dispatched to [ClientHubsBloc] after the user
/// explicitly confirms; cancelling just pops the dialog. The dialog is not
/// barrier-dismissible, so tapping outside neither confirms nor silently
/// dismisses the prompt.
Future<void> _confirmDeleteHub(BuildContext context, Hub hub) async {
  // Fall back to a generic label so an unnamed hub does not render as "".
  final String hubName = hub.name.isEmpty ? 'this hub' : hub.name;
  return showDialog<void>(
    context: context,
    barrierDismissible: false,
    builder: (BuildContext dialogContext) {
      return AlertDialog(
        title: const Text('Confirm Hub Deletion'),
        content: Column(
          mainAxisSize: MainAxisSize.min,
          crossAxisAlignment: CrossAxisAlignment.start,
          children: <Widget>[
            Text('Are you sure you want to delete "$hubName"?'),
            const SizedBox(height: UiConstants.space2),
            const Text('This action cannot be undone.'),
            const SizedBox(height: UiConstants.space2),
            Text(
              // User-facing copy fix: the previous text ("…we shouldn't be
              // able to delete the hub") read like an internal acceptance
              // note rather than a message addressed to the user.
              'A hub with shifts or orders assigned to it cannot be deleted.',
              style: UiTypography.footnote1r.copyWith(
                color: UiColors.textSecondary,
              ),
            ),
          ],
        ),
        actions: <Widget>[
          TextButton(
            // Pops via Modular to match the app-wide navigation style.
            onPressed: () => Modular.to.pop(),
            child: const Text('Cancel'),
          ),
          TextButton(
            onPressed: () {
              // Use the page context (not dialogContext) so the bloc is
              // resolved from the provider above this page.
              BlocProvider.of<ClientHubsBloc>(
                context,
              ).add(ClientHubsDeleteRequested(hub.id));
              Modular.to.pop();
            },
            style: TextButton.styleFrom(
              foregroundColor: UiColors.destructive,
            ),
            child: const Text('Delete'),
          ),
        ],
      );
    },
  );
}
}

View File

@@ -83,7 +83,7 @@ class SettingsActions extends StatelessWidget {
// Cancel button
UiButton.secondary(
text: t.common.cancel,
onPressed: () => Navigator.of(dialogContext).pop(),
onPressed: () => Modular.to.pop(),
),
],
),

View File

@@ -31,9 +31,11 @@ extension HomeNavigator on IModularNavigator {
/// Optionally provide a [tab] query param (e.g. `find`).
void pushShifts({String? tab}) {
if (tab == null) {
pushNamed('/worker-main/shifts');
navigate('/worker-main/shifts');
} else {
pushNamed('/worker-main/shifts?tab=$tab');
navigate('/worker-main/shifts', arguments: <String, dynamic>{
'initialTab': tab,
});
}
}

View File

@@ -132,8 +132,7 @@ class WorkerHomePage extends StatelessWidget {
EmptyStateWidget(
message: emptyI18n.no_shifts_today,
actionLink: emptyI18n.find_shifts_cta,
onAction: () =>
Modular.to.pushShifts(tab: 'find'),
onAction: () => Modular.to.pushShifts(tab: 'find'),
)
else
Column(

View File

@@ -31,6 +31,7 @@ class ShiftsBloc extends Bloc<ShiftsEvent, ShiftsState> {
on<LoadShiftsEvent>(_onLoadShifts);
on<LoadHistoryShiftsEvent>(_onLoadHistoryShifts);
on<LoadAvailableShiftsEvent>(_onLoadAvailableShifts);
on<LoadFindFirstEvent>(_onLoadFindFirst);
on<LoadShiftsForRangeEvent>(_onLoadShiftsForRange);
on<FilterAvailableShiftsEvent>(_onFilterAvailableShifts);
}
@@ -62,6 +63,7 @@ class ShiftsBloc extends Bloc<ShiftsEvent, ShiftsState> {
availableLoaded: false,
historyLoading: false,
historyLoaded: false,
myShiftsLoaded: true,
searchQuery: '',
jobType: 'all',
));
@@ -82,6 +84,7 @@ class ShiftsBloc extends Bloc<ShiftsEvent, ShiftsState> {
try {
final historyResult = await getHistoryShifts();
emit(currentState.copyWith(
myShiftsLoaded: true,
historyShifts: historyResult,
historyLoading: false,
historyLoaded: true,
@@ -113,6 +116,67 @@ class ShiftsBloc extends Bloc<ShiftsEvent, ShiftsState> {
}
}
/// Handles [LoadFindFirstEvent]: loads the Find tab's available shifts
/// before anything else, so a deep link straight into the Find tab is not
/// blocked behind the "my shifts" fetch.
///
/// Emission order matters here:
/// 1. If no [ShiftsLoaded] state exists yet, emit an empty placeholder so
///    the UI has a concrete state to render while loading.
/// 2. Skip the fetch entirely when available shifts were already loaded.
/// 3. Re-read [state] after the await, since other handlers may have
///    emitted in the meantime.
Future<void> _onLoadFindFirst(
LoadFindFirstEvent event,
Emitter<ShiftsState> emit,
) async {
// Seed an empty loaded state so the copyWith-based updates below have a
// base state to build on.
if (state is! ShiftsLoaded) {
emit(const ShiftsLoaded(
myShifts: [],
pendingShifts: [],
cancelledShifts: [],
availableShifts: [],
historyShifts: [],
availableLoading: false,
availableLoaded: false,
historyLoading: false,
historyLoaded: false,
myShiftsLoaded: false,
searchQuery: '',
jobType: 'all',
));
}
final currentState =
state is ShiftsLoaded ? state as ShiftsLoaded : null;
// Nothing to do if a previous load already populated the Find tab.
if (currentState != null && currentState.availableLoaded) return;
if (currentState != null) {
emit(currentState.copyWith(availableLoading: true));
}
try {
final availableResult =
await getAvailableShifts(const GetAvailableShiftsArguments());
// Re-read state after the await: another handler may have replaced it
// while the fetch was in flight. Fall back to a fresh default only if
// the loaded state somehow disappeared.
final loadedState = state is ShiftsLoaded
? state as ShiftsLoaded
: const ShiftsLoaded(
myShifts: [],
pendingShifts: [],
cancelledShifts: [],
availableShifts: [],
historyShifts: [],
availableLoading: true,
availableLoaded: false,
historyLoading: false,
historyLoaded: false,
myShiftsLoaded: false,
searchQuery: '',
jobType: 'all',
);
// Past shifts are filtered out before publishing the Find list.
emit(loadedState.copyWith(
availableShifts: _filterPastShifts(availableResult),
availableLoading: false,
availableLoaded: true,
));
} catch (_) {
// Best-effort failure handling: only clear the loading flag so the UI
// is not stuck on a spinner. NOTE(review): the error itself is
// swallowed here — confirm whether a user-visible error state is
// desired instead.
if (state is ShiftsLoaded) {
final current = state as ShiftsLoaded;
emit(current.copyWith(availableLoading: false));
}
}
}
Future<void> _onLoadShiftsForRange(
LoadShiftsForRangeEvent event,
Emitter<ShiftsState> emit,
@@ -124,7 +188,10 @@ class ShiftsBloc extends Bloc<ShiftsEvent, ShiftsState> {
if (state is ShiftsLoaded) {
final currentState = state as ShiftsLoaded;
emit(currentState.copyWith(myShifts: myShiftsResult));
emit(currentState.copyWith(
myShifts: myShiftsResult,
myShiftsLoaded: true,
));
return;
}
@@ -138,6 +205,7 @@ class ShiftsBloc extends Bloc<ShiftsEvent, ShiftsState> {
availableLoaded: false,
historyLoading: false,
historyLoaded: false,
myShiftsLoaded: true,
searchQuery: '',
jobType: 'all',
));

View File

@@ -14,6 +14,8 @@ class LoadHistoryShiftsEvent extends ShiftsEvent {}
class LoadAvailableShiftsEvent extends ShiftsEvent {}
class LoadFindFirstEvent extends ShiftsEvent {}
class LoadShiftsForRangeEvent extends ShiftsEvent {
final DateTime start;
final DateTime end;

View File

@@ -22,6 +22,7 @@ class ShiftsLoaded extends ShiftsState {
final bool availableLoaded;
final bool historyLoading;
final bool historyLoaded;
final bool myShiftsLoaded;
final String searchQuery;
final String jobType;
@@ -35,6 +36,7 @@ class ShiftsLoaded extends ShiftsState {
required this.availableLoaded,
required this.historyLoading,
required this.historyLoaded,
required this.myShiftsLoaded,
required this.searchQuery,
required this.jobType,
});
@@ -49,6 +51,7 @@ class ShiftsLoaded extends ShiftsState {
bool? availableLoaded,
bool? historyLoading,
bool? historyLoaded,
bool? myShiftsLoaded,
String? searchQuery,
String? jobType,
}) {
@@ -62,6 +65,7 @@ class ShiftsLoaded extends ShiftsState {
availableLoaded: availableLoaded ?? this.availableLoaded,
historyLoading: historyLoading ?? this.historyLoading,
historyLoaded: historyLoaded ?? this.historyLoaded,
myShiftsLoaded: myShiftsLoaded ?? this.myShiftsLoaded,
searchQuery: searchQuery ?? this.searchQuery,
jobType: jobType ?? this.jobType,
);
@@ -78,6 +82,7 @@ class ShiftsLoaded extends ShiftsState {
availableLoaded,
historyLoading,
historyLoaded,
myShiftsLoaded,
searchQuery,
jobType,
];

View File

@@ -21,6 +21,7 @@ class ShiftsPage extends StatefulWidget {
class _ShiftsPageState extends State<ShiftsPage> {
late String _activeTab;
DateTime? _selectedDate;
bool _prioritizeFind = false;
final ShiftsBloc _bloc = Modular.get<ShiftsBloc>();
@override
@@ -28,12 +29,22 @@ class _ShiftsPageState extends State<ShiftsPage> {
super.initState();
_activeTab = widget.initialTab ?? 'myshifts';
_selectedDate = widget.selectedDate;
_bloc.add(LoadShiftsEvent());
print('ShiftsPage init: initialTab=$_activeTab');
_prioritizeFind = widget.initialTab == 'find';
if (_prioritizeFind) {
_bloc.add(LoadFindFirstEvent());
} else {
_bloc.add(LoadShiftsEvent());
}
if (_activeTab == 'history') {
print('ShiftsPage init: loading history tab');
_bloc.add(LoadHistoryShiftsEvent());
}
if (_activeTab == 'find') {
_bloc.add(LoadAvailableShiftsEvent());
print('ShiftsPage init: entering find tab (not loaded yet)');
if (!_prioritizeFind) {
_bloc.add(LoadAvailableShiftsEvent());
}
}
}
@@ -43,6 +54,7 @@ class _ShiftsPageState extends State<ShiftsPage> {
if (widget.initialTab != null && widget.initialTab != _activeTab) {
setState(() {
_activeTab = widget.initialTab!;
_prioritizeFind = widget.initialTab == 'find';
});
}
if (widget.selectedDate != null && widget.selectedDate != _selectedDate) {
@@ -86,6 +98,10 @@ class _ShiftsPageState extends State<ShiftsPage> {
final bool historyLoaded = (state is ShiftsLoaded)
? state.historyLoaded
: false;
final bool myShiftsLoaded = (state is ShiftsLoaded)
? state.myShiftsLoaded
: false;
final bool blockTabsForFind = _prioritizeFind && !availableLoaded;
// Note: "filteredJobs" logic moved to FindShiftsTab
// Note: Calendar logic moved to MyShiftsTab
@@ -124,7 +140,8 @@ class _ShiftsPageState extends State<ShiftsPage> {
"My Shifts",
UiIcons.calendar,
myShifts.length,
enabled: true,
showCount: myShiftsLoaded,
enabled: !blockTabsForFind,
),
const SizedBox(width: 8),
_buildTab(
@@ -143,7 +160,7 @@ class _ShiftsPageState extends State<ShiftsPage> {
UiIcons.clock,
historyShifts.length,
showCount: historyLoaded,
enabled: baseLoaded,
enabled: !blockTabsForFind && baseLoaded,
),
],
),

View File

@@ -22,6 +22,12 @@ class _FindShiftsTabState extends State<FindShiftsTab> {
String _searchQuery = '';
String _jobType = 'all';
@override
void initState() {
  super.initState();
  // Removed leftover debug print ("FindShiftsTab init: ..."): development
  // logging must not ship. This tab performs no loading of its own; data
  // arrives via the parent page's bloc events.
}
Widget _buildFilterTab(String id, String label) {
final isSelected = _jobType == id;
return GestureDetector(

View File

@@ -0,0 +1,13 @@
# Firebase Data Connect service definition for the DEV environment.
# Targets Cloud SQL instance "krow-sql" / database "krow_db"; the Makefile's
# dataconnect-file targets copy this file over dataconnect.yaml before
# running firebase commands.
specVersion: "v1"
serviceId: "krow-workforce-db"
location: "us-central1"
schema:
source: "./schema"
datasource:
postgresql:
database: "krow_db"
cloudSql:
instanceId: "krow-sql"
# schemaValidation: "STRICT" # STRICT mode makes Postgres schema match Data Connect exactly.
# schemaValidation: "COMPATIBLE" # COMPATIBLE mode makes Postgres schema compatible with Data Connect.
connectorDirs: ["./connector"]

View File

@@ -0,0 +1,13 @@
# Firebase Data Connect service definition for the VALIDATION environment.
# Targets Cloud SQL instance "krow-sql-validation" / database "krow_db"; the
# Makefile's dataconnect-file targets copy this file over dataconnect.yaml
# before running firebase commands.
specVersion: "v1"
serviceId: "krow-workforce-db-validation"
location: "us-central1"
schema:
source: "./schema"
datasource:
postgresql:
database: "krow_db"
cloudSql:
instanceId: "krow-sql-validation"
# schemaValidation: "STRICT" # STRICT mode makes Postgres schema match Data Connect exactly.
# schemaValidation: "COMPATIBLE" # COMPATIBLE mode makes Postgres schema compatible with Data Connect.
connectorDirs: ["./connector"]

View File

@@ -5,7 +5,7 @@ mutation seedAll @transaction {
data: {
id: "dvpWnaBjT6UksS5lo04hfMTyq1q1"
email: "legendary@krowd.com"
fullName: "Krow"
fullName: "Krow Payements"
role: USER
userRole: "BUSINESS"
}
@@ -26,7 +26,7 @@ mutation seedAll @transaction {
id: "ef69e942-d6e5-48e5-a8bc-69d3faa63b2f"
businessName: "Krow"
userId: "dvpWnaBjT6UksS5lo04hfMTyq1q1"
contactName: "Krow Ops"
contactName: "Krow Payements"
email: "legendary@krowd.com"
phone: "+1-818-555-0148"
address: "5000 San Jose Street, Granada Hills, CA, USA"

View File

@@ -1,6 +1,47 @@
# --- Data Connect / Backend ---
.PHONY: dataconnect-enable-apis dataconnect-init dataconnect-deploy dataconnect-sql-migrate dataconnect-generate-sdk dataconnect-sync dataconnect-bootstrap-db check-gcloud-beta dataconnect-clean
# Usage examples:
# make dataconnect-sync DC_ENV=dev
# make dataconnect-seed DC_ENV=validation
# make dataconnect-clean DC_ENV=validation
# make dataconnect-generate-sdk DC_ENV=dev
#
DC_ENV ?= dev
DC_SERVICE_DEV := krow-workforce-db
DC_SERVICE_VALIDATION := krow-workforce-db-validation
ifeq ($(DC_ENV),dev)
DC_SERVICE := $(DC_SERVICE_DEV)
else ifeq ($(DC_ENV),validation)
DC_SERVICE := $(DC_SERVICE_VALIDATION)
else
$(error Invalid DC_ENV '$(DC_ENV)'. Use DC_ENV=dev or DC_ENV=validation)
endif
# All Data Connect helper targets are phony (they produce no files named
# after themselves). Fixed: the validation bootstrap rule is actually named
# `dataconnect-bootstrap-validation-database`, not `...-validation-db`.
.PHONY: dataconnect-enable-apis dataconnect-init dataconnect-deploy dataconnect-sql-migrate dataconnect-generate-sdk dataconnect-sync dataconnect-bootstrap-db check-gcloud-beta dataconnect-clean dataconnect-bootstrap-validation-database dataconnect-file dataconnect-file-validation dataconnect-file-dev dataconnect-seed dataconnect-test
# Materialize backend/dataconnect/dataconnect.yaml from the per-environment
# template selected by DC_ENV (dev | validation). Fails fast with a clear
# message when the template for the requested environment is missing.
dataconnect-file:
@echo "--> Starting creation Firebase Data Connect schema file for service [$(DC_SERVICE)]..."
@test -f backend/dataconnect/dataconnect.$(DC_ENV).yaml || (echo "❌ Missing backend/dataconnect/dataconnect.$(DC_ENV).yaml" && exit 1)
@cp backend/dataconnect/dataconnect.$(DC_ENV).yaml backend/dataconnect/dataconnect.yaml
@echo "✅ Creation Data Connect file completed."
# Force the DEV Data Connect config into place regardless of DC_ENV.
# Fixed: the progress message previously echoed $(DC_SERVICE), which depends
# on DC_ENV and could name the wrong service while the dev file was copied.
dataconnect-file-dev:
	@echo "--> Starting creation Firebase Data Connect schema file for service [$(DC_SERVICE_DEV)]..."
	@test -f backend/dataconnect/dataconnect.dev.yaml || (echo "❌ Missing backend/dataconnect/dataconnect.dev.yaml" && exit 1)
	@cp backend/dataconnect/dataconnect.dev.yaml backend/dataconnect/dataconnect.yaml
	@echo "✅ Creation Data Connect file completed."
# Force the VALIDATION Data Connect config into place regardless of DC_ENV.
# Fixed: the progress message previously echoed $(DC_SERVICE), which depends
# on DC_ENV and could name the wrong service while the validation file was
# copied.
dataconnect-file-validation:
	@echo "--> Starting creation Firebase Data Connect schema file for service [$(DC_SERVICE_VALIDATION)]..."
	@test -f backend/dataconnect/dataconnect.validation.yaml || (echo "❌ Missing backend/dataconnect/dataconnect.validation.yaml" && exit 1)
	@cp backend/dataconnect/dataconnect.validation.yaml backend/dataconnect/dataconnect.yaml
	@echo "✅ Creation Data Connect file completed."
# Enable all required APIs for Firebase Data Connect + Cloud SQL
dataconnect-enable-apis:
@@ -14,63 +55,94 @@ dataconnect-enable-apis:
@echo "✅ APIs enabled for project [$(GCP_PROJECT_ID)]."
# Initialize Firebase Data Connect (interactive wizard).
# use only once per project
dataconnect-init:
@echo "--> Initializing Firebase Data Connect for alias [$(FIREBASE_ALIAS)] (project: $(GCP_PROJECT_ID))..."
@firebase init dataconnect --project $(FIREBASE_ALIAS)
@echo "✅ Data Connect initialization command executed. Follow the interactive steps in the CLI."
# Deploy Data Connect schemas (GraphQL → Cloud SQL)
dataconnect-deploy:
@echo "--> Deploying Firebase Data Connect schemas to [$(ENV)] (project: $(FIREBASE_ALIAS))..."
@firebase deploy --only dataconnect --project=$(FIREBASE_ALIAS)
dataconnect-deploy: dataconnect-file
@echo "--> Deploying Firebase Data Connect schemas to [$(ENV)] (service: $(DC_SERVICE)) (project: $(FIREBASE_ALIAS))..."
@firebase deploy --only dataconnect:$(DC_SERVICE) --project=$(FIREBASE_ALIAS)
@echo "✅ Data Connect deployment completed for [$(ENV)]."
# Apply pending SQL migrations for Firebase Data Connect
dataconnect-sql-migrate:
@echo "--> Applying Firebase Data Connect SQL migrations to [$(ENV)] (project: $(FIREBASE_ALIAS))..."
dataconnect-sql-migrate: dataconnect-file
@echo "--> Applying Firebase Data Connect SQL migrations to [$(ENV)] (service: $(DC_SERVICE)) (project: $(FIREBASE_ALIAS))..."
@firebase dataconnect:sql:migrate --project=$(FIREBASE_ALIAS)
@echo "✅ Data Connect SQL migration completed for [$(ENV)]."
# Generate Data Connect client SDK for frontend-web and internal-api-harness
dataconnect-generate-sdk:
dataconnect-generate-sdk: dataconnect-file
@echo "--> Generating Firebase Data Connect SDK for web frontend and API harness..."
@firebase dataconnect:sdk:generate --project=$(FIREBASE_ALIAS)
@firebase dataconnect:sdk:generate --project=$(FIREBASE_ALIAS)
@echo "✅ Data Connect SDK generation completed for [$(ENV)]."
# Unified backend schema update workflow (schema -> deploy -> SDK)
dataconnect-sync:
@echo "--> [1/3] Deploying Data Connect..."
@firebase deploy --only dataconnect --project=$(FIREBASE_ALIAS)
@echo "--> [2/3] Applying SQL migrations..."
@firebase dataconnect:sql:migrate --project=$(FIREBASE_ALIAS)
@echo "--> [3/3] Regenerating SDK..."
dataconnect-sync: dataconnect-file
@echo "--> [1/3] Deploying Data Connect [$(DC_SERVICE)]..."
@firebase deploy --only dataconnect:$(DC_SERVICE) --project=$(FIREBASE_ALIAS)
@echo "--> [2/3] Applying SQL migrations [$(DC_SERVICE)]..."
@firebase dataconnect:sql:migrate $(DC_SERVICE) --project=$(FIREBASE_ALIAS)
@echo "--> [3/3] Regenerating SDK [$(DC_SERVICE)]..."
@firebase dataconnect:sdk:generate --project=$(FIREBASE_ALIAS)
@echo "✅ Data Connect SQL, deploy, and SDK generation completed for [$(ENV)]."
@echo "✅ Data Connect SQL, deploy, and SDK generation [$(ENV)]."
# Execute seed in Firebase Data Connect
dataconnect-seed:
@echo "--> Exec seed in Firebase Data Connect..."
@firebase dataconnect:execute backend/dataconnect/functions/seed.gql --project=$(FIREBASE_ALIAS)
dataconnect-seed: dataconnect-file
@echo "--> Exec seed in Firebase Data Connect (service: $(DC_SERVICE))..."
@firebase dataconnect:execute backend/dataconnect/functions/seed.gql --project=$(FIREBASE_ALIAS)
@echo "✅ Seed executed successfully."
# Execute clean, to delete all the data in Firebase Data Connect
dataconnect-clean:
@echo "--> Exec clean all the data in Firebase Data Connect..."
@firebase dataconnect:execute backend/dataconnect/functions/clean.gql --project=$(FIREBASE_ALIAS)
dataconnect-clean: dataconnect-file
@echo "--> Exec clean all the data in Firebase Data Connect (service: $(DC_SERVICE))..."
@firebase dataconnect:execute backend/dataconnect/functions/clean.gql --project=$(FIREBASE_ALIAS)
@echo "✅ Clean information executed successfully."
# Run tests for Data Connect deployment and migrations
dataconnect-test:
@echo "--> Running Data Connect tests..."
@echo "--> [1/3] Deploying Data Connect..."
@firebase deploy --only dataconnect --project=$(FIREBASE_ALIAS) --dry-run
@echo "--> [2/3] Applying SQL migrations..."
dataconnect-test: dataconnect-file
@echo "--> Running Data Connect tests (service: $(DC_SERVICE))..."
@echo "--> [1/2] Deploying Data Connect..."
@firebase deploy --only dataconnect:$(DC_SERVICE) --project=$(FIREBASE_ALIAS) --dry-run
@echo "--> [2/2] Applying SQL migrations..."
@firebase dataconnect:sql:diff --project=$(FIREBASE_ALIAS)
@echo "✅ Data Connect tests completed."
# Clone data from the dev instance (krow-sql) into the validation instance
# (krow-sql-validation) via Cloud SQL backup + restore. Both instances must
# already exist; the restore overwrites krow-sql-validation's current data.
# NOTE(review): the "latest backup" is selected by sorting on endTime right
# after `backups create` returns — this assumes the new backup has finished
# and is listed first; confirm behavior for long-running backups.
dataconnect-backup-dev-to-validation:
@echo "🔍 Validating instances exist in [$(GCP_PROJECT_ID)]..."
@if ! gcloud sql instances describe krow-sql --project=$(GCP_PROJECT_ID) >/dev/null 2>&1; then \
echo "❌ Dev instance 'krow-sql' not found in project $(GCP_PROJECT_ID)."; \
exit 1; \
fi
@if ! gcloud sql instances describe krow-sql-validation --project=$(GCP_PROJECT_ID) >/dev/null 2>&1; then \
echo "❌ Validation instance 'krow-sql-validation' not found in project $(GCP_PROJECT_ID)."; \
exit 1; \
fi
@echo "✅ Instances found."
@echo "🧰 Creating a backup on dev (krow-sql)..."
@gcloud sql backups create --instance=krow-sql --project=$(GCP_PROJECT_ID) >/dev/null
@echo "🔎 Fetching latest backup ID..."
@LATEST_BACKUP_ID="$$(gcloud sql backups list --instance=krow-sql --project=$(GCP_PROJECT_ID) --sort-by=~endTime --limit=1 --format='value(id)')"; \
if [ -z "$$LATEST_BACKUP_ID" ]; then \
echo "❌ Could not find any backup ID for krow-sql."; \
exit 1; \
fi; \
echo "✅ Latest backup ID: $$LATEST_BACKUP_ID"; \
echo "⚠️ Restoring into validation..."; \
gcloud sql backups restore $$LATEST_BACKUP_ID \
--restore-instance=krow-sql-validation \
--backup-instance=krow-sql \
--project=$(GCP_PROJECT_ID)
@echo "🎉 Done."
# -------------------------------------------------------------------
# ONE-TIME FULL SETUP FOR CLOUD SQL + DATA CONNECT
# ONE-TIME FULL SETUP FOR CLOUD SQL + DATA CONNECT DEV
# -------------------------------------------------------------------
# Check if gcloud and beta group are available
@@ -85,7 +157,7 @@ check-gcloud-beta:
}
@echo "✅ gcloud CLI and 'gcloud beta' are available."
dataconnect-bootstrap-db: check-gcloud-beta
dataconnect-bootstrap-db: dataconnect-file-dev check-gcloud-beta
@echo "🔍 Checking if Cloud SQL instance krow-sql already exists in [$(GCP_PROJECT_ID)]..."
@if gcloud sql instances describe krow-sql --project=$(GCP_PROJECT_ID) >/dev/null 2>&1; then \
echo "⚠️ Cloud SQL instance 'krow-sql' already exists in project $(GCP_PROJECT_ID)."; \
@@ -112,7 +184,7 @@ dataconnect-bootstrap-db: check-gcloud-beta
@echo "⚠️ Creating Firebase Data Connect service identity..."
gcloud beta services identity create \
--service=firebasedataconnect.googleapis.com \
--project=$(GCP_PROJECT_ID)
--project=$(GCP_PROJECT_ID)
@echo "⚠️ Enabling IAM authentication on Cloud SQL instance krow-sql..."
gcloud sql instances patch krow-sql \
@@ -124,9 +196,77 @@ dataconnect-bootstrap-db: check-gcloud-beta
firebase dataconnect:sql:setup krow-workforce-db --project=$(FIREBASE_ALIAS)
@echo "⚠️ Deploying initial Data Connect configuration..."
@firebase deploy --only dataconnect --project=$(FIREBASE_ALIAS)
@firebase deploy --only dataconnect:$(DC_SERVICE_DEV) --project=$(FIREBASE_ALIAS)
@echo "⚠️ Generating initial Data Connect SDK..."
@firebase dataconnect:sdk:generate --project=$(FIREBASE_ALIAS)
@firebase dataconnect:sdk:generate --project=$(FIREBASE_ALIAS)
@echo "🎉 Cloud SQL + Data Connect bootstrap completed successfully!"
# -------------------------------------------------------------------
# ONE-TIME FULL SETUP FOR CLOUD SQL + DATA CONNECT VALIDATION
# -------------------------------------------------------------------
# Creates: krow-sql-validation + krow_db (inside it) + links service krow-workforce-db-validation
# Then clones data from krow-sql -> krow-sql-validation via backup/restore
# One-time bootstrap of the validation environment: creates the
# krow-sql-validation instance and krow_db database, enables IAM auth,
# clones data from krow-sql via backup/restore, then links and deploys the
# krow-workforce-db-validation Data Connect service.
# Fixes vs. previous version:
#   - removed an echo that referenced an undefined $$BACKUP_OP variable;
#   - the SDK-generation message now names $(DC_SERVICE_VALIDATION) instead
#     of the DC_ENV-dependent $(DC_SERVICE).
dataconnect-bootstrap-validation-database: dataconnect-file-validation
	@echo "🔍 Checking if Cloud SQL instance krow-sql-validation already exists in [$(GCP_PROJECT_ID)]..."
	@if gcloud sql instances describe krow-sql-validation --project=$(GCP_PROJECT_ID) >/dev/null 2>&1; then \
		echo "⚠️ Cloud SQL instance 'krow-sql-validation' already exists in project $(GCP_PROJECT_ID)."; \
		echo "   If you need to recreate it, delete the instance manually first."; \
		exit 1; \
	fi
	@echo "⚠️ Creating Cloud SQL instance krow-sql-validation (tier: $(SQL_TIER))..."
	gcloud sql instances create krow-sql-validation \
		--database-version=POSTGRES_15 \
		--tier=$(SQL_TIER) \
		--region=us-central1 \
		--storage-size=10 \
		--storage-auto-increase \
		--availability-type=zonal \
		--backup-start-time=03:00 \
		--project=$(GCP_PROJECT_ID)
	@echo "⚠️ Creating Cloud SQL database krow_db on krow-sql-validation..."
	gcloud sql databases create krow_db \
		--instance=krow-sql-validation \
		--project=$(GCP_PROJECT_ID)
	@echo "⚠️ Enabling IAM authentication on Cloud SQL instance krow-sql-validation..."
	gcloud sql instances patch krow-sql-validation \
		--project=$(GCP_PROJECT_ID) \
		--database-flags=cloudsql.iam_authentication=on \
		--quiet
	@echo "🔁 Creating a backup on dev instance (krow-sql) to clone data into validation..."
	@echo "   (Prereq: krow-sql must already exist and be stable.)"
	gcloud sql backups create --instance=krow-sql --project=$(GCP_PROJECT_ID)
	@echo "🔎 Fetching latest backup ID from krow-sql..."
	@BACKUP_ID="$$(gcloud sql backups list --instance=krow-sql --project=$(GCP_PROJECT_ID) --limit=1 --sort-by=~endTime --format='value(id)')"; \
	if [ -z "$$BACKUP_ID" ]; then \
		echo "❌ Could not find a backup ID for krow-sql."; \
		exit 1; \
	fi; \
	echo "✅ Latest backup ID: $$BACKUP_ID"; \
	echo "⚠️ Restoring backup into krow-sql-validation..."; \
	gcloud sql backups restore $$BACKUP_ID \
		--restore-instance=krow-sql-validation \
		--backup-instance=krow-sql \
		--project=$(GCP_PROJECT_ID)
	@echo "⚠️ Linking Data Connect service (krow-workforce-db-validation) with Cloud SQL..."
	@echo "   When prompted, select instance: krow-sql-validation and database: krow_db"
	firebase dataconnect:sql:setup krow-workforce-db-validation --project=$(FIREBASE_ALIAS)
	@echo "⚠️ Deploying Data Connect configuration ($(DC_SERVICE_VALIDATION))..."
	@firebase deploy --only dataconnect:$(DC_SERVICE_VALIDATION) --project=$(FIREBASE_ALIAS)
	@echo "⚠️ Generating Data Connect SDK ($(DC_SERVICE_VALIDATION))..."
	@firebase dataconnect:sdk:generate --project=$(FIREBASE_ALIAS)
	@echo "🎉 Validation Cloud SQL + Data Connect bootstrap completed successfully!"

View File

@@ -2,13 +2,13 @@
.PHONY: launchpad-dev deploy-launchpad-hosting
launchpad-dev:
launchpad-dev: sync-prototypes
@echo "--> Starting local Launchpad server using Firebase Hosting emulator..."
@echo " - Generating secure email hashes..."
@node scripts/generate-allowed-hashes.js
@firebase serve --only hosting:launchpad --project=$(FIREBASE_ALIAS)
deploy-launchpad-hosting:
deploy-launchpad-hosting: sync-prototypes
@echo "--> Deploying Internal Launchpad to Firebase Hosting..."
@echo " - Generating secure email hashes..."
@node scripts/generate-allowed-hashes.js