Skip to content

Commit

Permalink
adding nsfw support
Browse files Browse the repository at this point in the history
  • Loading branch information
jay-dhanwant-yral committed Nov 14, 2024
1 parent beaf821 commit 76006e9
Show file tree
Hide file tree
Showing 9 changed files with 131 additions and 34 deletions.
10 changes: 5 additions & 5 deletions .github/workflows/deploy-on-merge-to-main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,13 +19,13 @@ jobs:
- uses: superfly/flyctl-actions/setup-flyctl@master
- name: Set Fly secrets
run: |
flyctl secrets set --stage --app "yral-icpumpsearch" "SERVICE_CRED=$SERVICE_CRED"
flyctl secrets set --stage --app "yral-icpumpsearch" "GOOGLE_GENAI_API_KEY=$GOOGLE_GENAI_API_KEY"
flyctl secrets set --stage --app "prod-yral-icpumpsearch" "SERVICE_CRED=$(jq -n -c "${SERVICE_CRED}")"
flyctl secrets set --stage --app "prod-yral-icpumpsearch" "GOOGLE_GENAI_API_KEY=$(jq -n -c "${GOOGLE_GENAI_API_KEY}")"
env:
FLY_API_TOKEN: ${{ secrets.FLY_SEARCH_LLM_TOKEN }}
SERVICE_CRED: ${{ secrets.SERVICE_CRED }}
FLY_API_TOKEN: ${{ secrets.FLY_IO_DEPLOY_TOKEN }}
SERVICE_CRED: ${{ secrets.GOOGLE_CLOUD_EVENTS_BQ_SERVICE_ACCOUNT_KEY_JSON }}
GOOGLE_GENAI_API_KEY: ${{ secrets.GOOGLE_GENAI_API_KEY }}
- name: Deploy a docker container to fly.io
run: flyctl deploy --remote-only
env:
FLY_API_TOKEN: ${{ secrets.FLY_SEARCH_LLM_TOKEN }}
FLY_API_TOKEN: ${{ secrets.FLY_IO_DEPLOY_TOKEN }}
16 changes: 10 additions & 6 deletions fly.toml
Original file line number Diff line number Diff line change
@@ -1,18 +1,22 @@
# fly.toml app configuration file generated for yral-icpumpsearch on 2024-10-03T21:04:21+05:30
#
# See https://fly.io/docs/reference/configuration/ for information about how to use this file.
#

app = 'yral-icpumpsearch'
app = 'prod-yral-icpumpsearch'
primary_region = 'ams'
kill_signal = 'SIGINT'
kill_timeout = '5s'
swap_size_mb = 32768

[build]

[env]
PORT = '50051'

[http_service]
internal_port = 50051
force_https = true
auto_stop_machines = 'stop'
auto_start_machines = true
min_machines_running = 0
processes = ['app']

[[services]]
protocol = 'tcp'
internal_port = 50051
Expand Down
2 changes: 1 addition & 1 deletion localping.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ def run():

# Make the call
try:
response = stub.Search(request, metadata=metadata)
response = stub.SearchV1(request, metadata=metadata)
print("Search Response:")
print(f"Answer: {response.answer}")
print('\n--XX--\n')
Expand Down
6 changes: 4 additions & 2 deletions ping_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,9 @@
import search_rec_pb2
import search_rec_pb2_grpc
# server = 'localhost:50052'
server = 'yral-icpumpsearch.fly.dev:443'
# server = 'yral-icpumpsearch.fly.dev:443'
# server = 'stage-yral-icpumpsearch.fly.dev:443'
server = 'prod-yral-icpumpsearch.fly.dev:443'

def run():
# Read the token from the environment variable
Expand All @@ -15,7 +17,7 @@ def run():
request = search_rec_pb2.SearchRequest(input_query="what are some tokens related to dog? What are they talking about?")


response = stub.Search(request)#, metadata=metadata)
response = stub.SearchV1(request)#, metadata=metadata)
print("Search service is up and running!")
print("Received response:")
print(f"Answer: {response.answer}")
Expand Down
5 changes: 3 additions & 2 deletions search_agent_bq.py
Original file line number Diff line number Diff line change
Expand Up @@ -282,15 +282,16 @@ def run_queries_and_save_results(queries, search_agent, output_file='test_case_r
log_file.write('X'*10 + '\n')
with open(output_file, 'a') as log_file:
start_time = time.time()
result_df, answer = search_agent.process_query(user_query)
result_df, answer, rag_data = search_agent.process_query(user_query)
end_time = time.time()
response_time = end_time - start_time

log_file.write(f"Query: {user_query}\n")
log_file.write(f"\nResponse: {answer}\n")
log_file.write(f"\nResponse time: {response_time:.2f} seconds\n")
log_file.write("\nTop 5 results:\n")
result = result_df[['token_name', 'description', 'created_at']].head()
result = result_df[['token_name', 'description', 'created_at', 'is_nsfw']].head()
# result = result_df.head()
# result = result_df.copy()


Expand Down
33 changes: 25 additions & 8 deletions search_rec.proto
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,13 @@ syntax = "proto3";
package search;

service SearchService {
rpc Search (SearchRequest) returns (SearchResponse) {}
rpc ContextualSearch (ContextualSearchRequest) returns (ContextualSearchResponse) {}
rpc Search(SearchRequest) returns (SearchResponse) {}
rpc SearchV1(SearchRequest) returns (SearchResponseV1) {}
rpc ContextualSearch(ContextualSearchRequest)
returns (ContextualSearchResponse) {}
}

message SearchRequest {
string input_query = 1;
}
message SearchRequest { string input_query = 1; }

message SearchResponse {
repeated SearchItem items = 1;
Expand All @@ -29,15 +29,32 @@ message SearchItem {
string created_at = 9;
}

// Response for the SearchV1 RPC: ranked token results plus the
// LLM-generated answer and the raw retrieval context behind it.
message SearchResponseV1 {
  repeated SearchItemV1 items = 1;
  string answer = 2;
  // Retrieval context used to produce `answer`; presumably meant to be fed
  // back via ContextualSearchRequest.rag_data for follow-ups — confirm.
  string rag_data = 3;
}

// A single token search hit. Same fields as SearchItem plus an NSFW flag.
message SearchItemV1 {
  string canister_id = 1;
  string description = 2;
  string host = 3;
  string link = 4;
  string logo = 5;
  string token_name = 6;
  string token_symbol = 7;
  string user_id = 8;
  string created_at = 9;
  bool is_nsfw = 10;  // true when the token is flagged as not-safe-for-work
}

// Request for a follow-up, conversation-aware search.
message ContextualSearchRequest {
  string input_query = 1;
  // Prior turns of the conversation, oldest first — presumably; confirm
  // ordering against the server implementation.
  repeated QueryResponsePair previous_interactions = 2;
  // Retrieval context carried over from an earlier search response.
  string rag_data = 3;
}

message ContextualSearchResponse {
string answer = 1;
}
message ContextualSearchResponse { string answer = 1; }

message QueryResponsePair {
string query = 1;
Expand Down
22 changes: 13 additions & 9 deletions search_rec_pb2.py

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

45 changes: 44 additions & 1 deletion search_rec_pb2_grpc.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

import search_rec_pb2 as search__rec__pb2

GRPC_GENERATED_VERSION = '1.66.2'
GRPC_GENERATED_VERSION = '1.67.1'
GRPC_VERSION = grpc.__version__
_version_not_supported = False

Expand Down Expand Up @@ -39,6 +39,11 @@ def __init__(self, channel):
request_serializer=search__rec__pb2.SearchRequest.SerializeToString,
response_deserializer=search__rec__pb2.SearchResponse.FromString,
_registered_method=True)
self.SearchV1 = channel.unary_unary(
'/search.SearchService/SearchV1',
request_serializer=search__rec__pb2.SearchRequest.SerializeToString,
response_deserializer=search__rec__pb2.SearchResponseV1.FromString,
_registered_method=True)
self.ContextualSearch = channel.unary_unary(
'/search.SearchService/ContextualSearch',
request_serializer=search__rec__pb2.ContextualSearchRequest.SerializeToString,
Expand All @@ -55,6 +60,12 @@ def Search(self, request, context):
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')

    def SearchV1(self, request, context):
        """Default SearchV1 handler in the generated servicer base class.

        Concrete servicers override this. The base implementation reports
        UNIMPLEMENTED to the remote caller and raises locally.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

def ContextualSearch(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
Expand All @@ -69,6 +80,11 @@ def add_SearchServiceServicer_to_server(servicer, server):
request_deserializer=search__rec__pb2.SearchRequest.FromString,
response_serializer=search__rec__pb2.SearchResponse.SerializeToString,
),
'SearchV1': grpc.unary_unary_rpc_method_handler(
servicer.SearchV1,
request_deserializer=search__rec__pb2.SearchRequest.FromString,
response_serializer=search__rec__pb2.SearchResponseV1.SerializeToString,
),
'ContextualSearch': grpc.unary_unary_rpc_method_handler(
servicer.ContextualSearch,
request_deserializer=search__rec__pb2.ContextualSearchRequest.FromString,
Expand Down Expand Up @@ -112,6 +128,33 @@ def Search(request,
metadata,
_registered_method=True)

    @staticmethod
    def SearchV1(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        # Generated one-shot convenience wrapper: invokes the unary-unary
        # SearchV1 RPC against `target` without requiring the caller to build
        # a SearchServiceStub/channel first.
        return grpc.experimental.unary_unary(
            request,
            target,
            '/search.SearchService/SearchV1',
            search__rec__pb2.SearchRequest.SerializeToString,
            search__rec__pb2.SearchResponseV1.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)

@staticmethod
def ContextualSearch(request,
target,
Expand Down
26 changes: 26 additions & 0 deletions server.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,32 @@ def Search(self, request, context):
except Exception as e:
_LOGGER.error(f"SearchAgent failed: {e}")
return response

    def SearchV1(self, request, context):
        """Handle the SearchV1 RPC: run the search agent and build a
        SearchResponseV1 (answer + per-token items incl. `is_nsfw` + rag_data).

        On any agent failure the error is logged and a partially-filled (or
        empty) response is returned — the client is NOT told an error occurred.
        NOTE(review): this silent-swallow matches the sibling Search handler;
        consider context.set_code(INTERNAL) if clients should see failures.
        """
        search_query = request.input_query
        # _LOGGER.info(f"Received search query: {search_query}")
        response = search_rec_pb2.SearchResponseV1()
        try:
            # process_query is assumed to return (DataFrame-like with .iloc,
            # answer str, rag_data str) — columns below must all be present.
            df, answer, rag_data = self.search_agent.process_query(search_query)
            response.answer = answer
            total_responses_fetched = len(df)
            for i in range(total_responses_fetched):
                item = response.items.add()
                item.canister_id = df.iloc[i]['canister_id']
                item.description = df.iloc[i]['description']
                item.host = df.iloc[i]['host']
                item.link = df.iloc[i]['link']
                item.logo = df.iloc[i]['logo']
                item.token_name = df.iloc[i]['token_name']
                item.token_symbol = df.iloc[i]['token_symbol']
                item.user_id = df.iloc[i]['user_id']
                item.created_at = df.iloc[i]['created_at']
                # presumably a bool column; proto field is bool — confirm dtype
                item.is_nsfw = df.iloc[i]['is_nsfw']
            response.rag_data = rag_data
        except Exception as e:
            # Best-effort: log and fall through to return whatever was built.
            _LOGGER.error(f"SearchAgent failed: {e}")
        return response


def ContextualSearch(self, request, context):
input_query = request.input_query
Expand Down

0 comments on commit 76006e9

Please sign in to comment.