Commit fc749f2
Parent(s): 98c7a08
feat: add some debugging prints
public-prediction/kafka_consumer.py
CHANGED
@@ -21,8 +21,6 @@ def process_batch(batch: List[dict[str, any]], batch_size: int, gpt_helper: GetG
             get_gpt_responses, data, gpt_helper) for data in batch]
         results = [future.result() for future in futures]

-        print("Batch ready with gpt responses", results)
-
         predictions = predict_custom_trained_model(
             instances=results, project=os.environ.get("PROJECT_ID"), endpoint_id=os.environ.get("ENDPOINT_ID"))

@@ -41,6 +39,7 @@ def process_batch(batch: List[dict[str, any]], batch_size: int, gpt_helper: GetG


 def send_results_back(full_results: dict[str, any], job_application_id: str):
+    print(f"Sending results back with job_app_id {job_application_id}")
     url = "https://ta-2-sistem-cerdas-be-vi2jkj4riq-et.a.run.app/api/anti-cheat/update"
     headers = {
         "Content-Type": "application/json",
@@ -79,6 +78,8 @@ def consume_messages():
             print("Continuing...")
             continue

+        print(f"Parsing successful. Processing job_app_id {incoming_message['job_application_id']}")
+
         full_results = []
         for i in range(0, len(full_batch), BATCH_SIZE):
             batch = full_batch[i:i+BATCH_SIZE]