diff --git a/.github/workflows/docker/compose/struct2graph-compose.yaml b/.github/workflows/docker/compose/struct2graph-compose.yaml new file mode 100644 index 0000000000..18fe47add5 --- /dev/null +++ b/.github/workflows/docker/compose/struct2graph-compose.yaml @@ -0,0 +1,9 @@ +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +# this file should be run in the root of the repo +services: + struct2graph: + build: + dockerfile: comps/struct2graph/src/Dockerfile + image: ${REGISTRY:-opea}/struct2graph:${TAG:-latest} diff --git a/comps/cores/mega/constants.py b/comps/cores/mega/constants.py index b0639b9b29..c978049c69 100644 --- a/comps/cores/mega/constants.py +++ b/comps/cores/mega/constants.py @@ -36,6 +36,7 @@ class ServiceType(Enum): TEXT2SQL = 19 TEXT2GRAPH = 20 TEXT2CYPHER = 21 + STRUCT2GRAPH = 23 class MegaServiceEndpoint(Enum): diff --git a/comps/struct2graph/deployment/docker_compose/README.md b/comps/struct2graph/deployment/docker_compose/README.md new file mode 100644 index 0000000000..e69de29bb2 diff --git a/comps/struct2graph/deployment/docker_compose/struct2graph-compose.yaml b/comps/struct2graph/deployment/docker_compose/struct2graph-compose.yaml new file mode 100644 index 0000000000..6aa643b37f --- /dev/null +++ b/comps/struct2graph/deployment/docker_compose/struct2graph-compose.yaml @@ -0,0 +1,38 @@ +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +include: + - ../../../third_parties/neo4j/deployment/docker_compose/compose.yaml + +services: + struct2graph: + image: opea/struct2graph:latest + container_name: struct2graph + environment: + - no_proxy=${no_proxy} + - https_proxy=${https_proxy} + - http_proxy=${http_proxy} + - NEO4J_URL=${NEO4J_URL} + - NEO4J_server_directories_import=import + - NEO4J_PLUGINS=["apoc"] + - NEO4J_dbms_security_allow__csv__import__from__file__urls=true + - NEO4J_server_directories_import='/var/lib/neo4j/import' + - 
NEO4J_dbms_security_procedures_unrestricted=apoc.\\\* + ports: + - ${STRUCT2GRAPH_PORT:-8090}:8090 + depends_on: + neo4j-apoc: + condition: service_healthy + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:7474"] + interval: 10s + timeout: 5s + retries: 10 + start_period: 30s + ipc: host + network_mode: host + restart: always + +networks: + default: + driver: bridge diff --git a/comps/struct2graph/src/Dockerfile b/comps/struct2graph/src/Dockerfile new file mode 100644 index 0000000000..976e32dadf --- /dev/null +++ b/comps/struct2graph/src/Dockerfile @@ -0,0 +1,37 @@ +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +FROM ubuntu:22.04 + +WORKDIR /home/graph_extract + +FROM python:3.11-slim +ENV LANG=C.UTF-8 +ARG ARCH=cpu + +RUN apt-get update -y && apt-get install vim -y && apt-get install -y --no-install-recommends --fix-missing \ + build-essential + +RUN useradd -m -s /bin/bash user && \ + mkdir -p /home/user && \ + chown -R user /home/user/ + +COPY comps /home/user/comps + +RUN pip install --no-cache-dir --upgrade pip setuptools && \ + if [ ${ARCH} = "cpu" ]; then \ + pip install --no-cache-dir --extra-index-url https://download.pytorch.org/whl/cpu -r /home/user/comps/struct2graph/src/requirements.txt; \ + else \ + pip install --no-cache-dir -r /home/user/comps/struct2graph/src/requirements.txt; \ + fi + +ENV https_proxy=${https_proxy} +ENV http_proxy=${http_proxy} +ENV no_proxy=${no_proxy} +ENV PYTHONPATH="/home/user/":$PYTHONPATH + +USER user + +WORKDIR /home/user/comps/struct2graph/src/ + +ENTRYPOINT ["python", "opea_struct2graph_microservice.py"] diff --git a/comps/struct2graph/src/README.md b/comps/struct2graph/src/README.md new file mode 100644 index 0000000000..f66676e7dd --- /dev/null +++ b/comps/struct2graph/src/README.md @@ -0,0 +1,128 @@ +# Struct2Graph Microservice + +The Struct2Graph Microservice represents a powerful solution for transforming structured data formats like csv and json into 
Neo4j graph structures, serving as a crucial bridge between traditional data sources and modern graph-based systems. This process allows for enriching existing graphs, performing advanced data analysis, and constructing comprehensive knowledge graphs. +By importing structured data, users can integrate it into RAG flows, enhance querying capabilities to uncover patterns and relationships across large datasets. It's particularly useful for populating databases, creating hierarchical structures, and enabling cross-document querying. Furthermore, this approach supports data integration to provide a solid foundation for developing sophisticated graph-based applications that can exploit the rich relationships and properties inherent in graph data structures. + +## Features + +To convert structured data from CSV and JSON we provide the following interface - +Input: + +``` +{ + "input_text": "string", + "task": "string", + "cypher_cmd": "string" +} +``` + +Output: Directory with results to query. + +The task can be set to the following - + +1. Index - generates index based on the cypher command (Output: Generated index) +2. Query - queries the index based on the input text (Output: Directory with results to query) + +## Implementation + +The struct2graph microservice is able to load and query structured data through neo4j. +The service is hosted in a docker. The mode of operation is through docker build + run or using docker compose. + +## 🚀1. 
Start Microservice with docker run + +### Install Requirements + +```bash +pip install -r requirements.txt +``` + +### Export environment variables + +``` +cd comps/struct2graph/src/ +source environment_setup.sh +``` + +OR + +``` +export https_proxy=${https_proxy} +export http_proxy=${http_proxy} +export no_proxy=${no_proxy} +export INDEX_NAME=${INDEX_NAME:-"graph_store"} +export PYTHONPATH="/home/user/" +export NEO4J_USERNAME=${NEO4J_USERNAME:-"neo4j"} +export NEO4J_PASSWORD=${NEO4J_PASSWORD:-"neo4j_password"} +export NEO4J_URL=${NEO4J_URL:-"neo4j://neo4j-apoc:7687"} +export DATA_DIRECTORY=${DATA_DIRECTORY:-data} +export STRUCT2GRAPH_PORT=8090 +export LOAD_FORMAT="CSV" # or JSON +``` + +### Launch Neo4j Service + +Refer to [this link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/neo4j/src/README.md) to start and verify the neo4j microservice. + +### Verify the Neo4j Service + +```bash +curl -v http://localhost:7474 +``` + +If the Neo4j server is running correctly, the response should include an HTTP status code of 200 OK. Any other status code or an error message indicates that the server is not running or is not accessible. If the port 7474 is mapped to another port, you should change the port in the command accordingly. + +### Start struct2graph Microservice with Docker + +Command to build struct2graph microservice - + +```bash +docker build -f Dockerfile -t opea/struct2graph:latest ../../../ +``` + +Command to run struct2graph microservice - + +```bash +docker run -i -t --net=host --ipc=host -p STRUCT2GRAPH_PORT opea/struct2graph:latest +``` + +The docker launches the struct2graph microservice interactively. + +## 🚀2. Start Microservice with docker compose + +Export environment variables as mentioned in option 1. + +Command to run docker compose - + +```bash +cd GenAIComps/tests/struct2graph/deployment/docker_compose + +docker compose -f struct2graph-compose.yaml up +``` + +## 3. 
Validate the service using API endpoint + +Example for "index" task - + +```bash +curl -X POST http://localhost:$STRUCT2GRAPH_PORT/v1/struct2graph \ +-H "accept: application/json" \ +-H "Content-Type: application/json" \ +-d '{ + "input_text": "", + "task": "Index", + "cypher_cmd": "LOAD CSV WITH HEADERS FROM \'file:///$DATA_DIRECTORY/test1.csv\' AS row CREATE (:Person {ID: toInteger(row.ID), Name: row.Name, Age: toInteger(row.Age), City: row.City})" +}' +``` + +Example for "query" task - + +```bash +curl -X POST http://localhost:$STRUCT2GRAPH_PORT/v1/struct2graph \ +-H "accept: application/json" \ +-H "Content-Type: application/json" \ +-d '{ + "input_text": "MATCH (p:Person {Name:\'Alice\'}) RETURN p", + "task": "Query", + "cypher_cmd": "" +}' +``` diff --git a/comps/struct2graph/src/environment_setup.sh b/comps/struct2graph/src/environment_setup.sh new file mode 100644 index 0000000000..162cacbc36 --- /dev/null +++ b/comps/struct2graph/src/environment_setup.sh @@ -0,0 +1,34 @@ +# Copyright (C) 2025 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +####################################################################### +# Proxy +####################################################################### +export https_proxy=${https_proxy} +export http_proxy=${http_proxy} +export no_proxy=${no_proxy} +################################################################ +# Configure LLM Parameters based on the model selected. 
+################################################################ +export INDEX_NAME=${INDEX_NAME:-"graph_store"} +export PYTHONPATH="/home/user/" +export NEO4J_USERNAME=${NEO4J_USERNAME:-"neo4j"} +export NEO4J_PASSWORD=${NEO4J_PASSWORD:-"neo4j_password"} +export NEO4J_URL=${NEO4J_URL:-"neo4j://neo4j-apoc:7687"} +export DATA_DIRECTORY=${DATA_DIRECTORY:-data} +export FILENAME=${FILENAME:-test1.csv} +export LOAD_FORMAT=${LOAD_FORMAT:-"CSV"} + + +export CYPHER_CSV_CMD="LOAD CSV WITH HEADERS FROM 'file:////test1.csv' AS row \ +CREATE (:Person {ID: toInteger(row.ID), Name: row.Name, Age: toInteger(row.Age), City: row.City});" +export CYPHER_JSON_CMD=" \ +CALL apoc.load.json('file:///test1.json') YIELD value \ +UNWIND value.table AS row \ +CREATE (:Person { \ + ID: row.ID, \ + Name: row.Name, \ + Age: row.Age, \ + City: row.City \ + }); \ + " diff --git a/comps/struct2graph/src/integrations/graph_utils.py b/comps/struct2graph/src/integrations/graph_utils.py new file mode 100644 index 0000000000..39feccdbf9 --- /dev/null +++ b/comps/struct2graph/src/integrations/graph_utils.py @@ -0,0 +1,99 @@ +# Copyright (C) 2025 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +import logging +import os + +from langchain_neo4j import Neo4jGraph + +from comps import CustomLogger + +logger = CustomLogger("opea_struct2graph") + + +class PrepareGraphDB: + """A class for preparing and saving a GraphDB.""" + + def __init__(self): + self.graph_store = self.neo4j_link() + + def neo4j_link(self): + NEO4J_URL = os.getenv("NEO4J_URL") + NEO4J_USERNAME = os.getenv("NEO4J_USERNAME") + NEO4J_PASSWORD = os.getenv("NEO4J_PASSWORD") + NEO4J_DATABASE = os.getenv("NEO4J_DATABASE") + + if not all([NEO4J_URL, NEO4J_USERNAME, NEO4J_PASSWORD]): + raise EnvironmentError("Missing required Neo4j environment variables") + + graph_store = Neo4jGraph(username=NEO4J_USERNAME, password=NEO4J_PASSWORD, url=NEO4J_URL) + return graph_store + + def cleanup_neo4j(self): + try: + cypher = """MATCH (n) DETACH 
DELETE n""" + self.graph_store.query(cypher) + + logger.info("## Existing graph_store schema...") + logger.info(self.graph_store.schema) + + logger.info("Deleting all nodes...") + cypher = """MATCH (n) RETURN count(n)""" + result = self.graph_store.query(cypher) + + logger.info("Dropping all constraints...") + for constraint in self.graph_store.query("SHOW CONSTRAINTS"): + self.graph_store.query(f"DROP CONSTRAINT {constraint['name']}") + + logger.info("Dropping all indexes...") + for index in self.graph_store.query("SHOW INDEXES"): + logger.info(f"Removing index {index['name']}:") + self.graph_store.query(f"""DROP INDEX `{index['name']}`""") + + logger.info("## Blank schema...") + self.graph_store.refresh_schema() + logger.info(self.graph_store.schema) + return + + except Exception as e: + logger.error(f"Failed to cleanup Neo4j database: {str(e)}") + raise + + def load_graphdb(self, cypher_cmd): + LOAD_FORMAT = os.getenv("LOAD_FORMAT", "CSV") + + try: + if LOAD_FORMAT == "CSV": + cypher_csv_insert = cypher_cmd + logger.info(f"INSERTING CSV Cypher command : {cypher_csv_insert}") + logger.info("Preparing graphdb...") + self.graph_store.query(cypher_csv_insert) + logger.info("GraphDB is created and saved.") + + elif LOAD_FORMAT == "JSON": + cypher_json_insert = cypher_cmd + logger.info(f"INSERTING JSON Cypher command : {cypher_json_insert}") + self.graph_store.query(cypher_json_insert) + logger.info(f"The following is the graph schema \n\n {self.graph_store.schema}") + logger.info("GraphDB is created and saved.") + + else: + logger.error("Only CSV and JSON formats are supported") + raise ValueError("Only CSV and JSON formats are supported") + + logger.info("Preparing graphdb...") + return self.graph_store + + except NameError: + raise ValueError("Error: The variable CYPHER_CSV_CMD is not set.") + + def prepare_insert_graphdb(self, cypher_cmd): + logger.info("Cleaning up graph db") + self.cleanup_neo4j() + logger.info("Done cleaning up graph db") + 
self.load_graphdb(cypher_cmd) + logger.info("Completed inserting into graphdb") + logger.info(f"The following is the graph schema \n\n {self.graph_store.schema}") + logger.info("Preparing graphdb...") + logger.info("GraphDB is created and saved.") + return self.graph_store diff --git a/comps/struct2graph/src/integrations/opea.py b/comps/struct2graph/src/integrations/opea.py new file mode 100644 index 0000000000..e076f27897 --- /dev/null +++ b/comps/struct2graph/src/integrations/opea.py @@ -0,0 +1,106 @@ +# Copyright (C) 2025 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import annotations + +import os +import time +from typing import Annotated, Optional + +import requests +from pydantic import BaseModel, Field + +from comps import CustomLogger, OpeaComponent, OpeaComponentRegistry, ServiceType +from comps.struct2graph.src.integrations.graph_utils import PrepareGraphDB + +global graph_store +logger = CustomLogger("comps-struct2graph") +logflag = os.getenv("LOGFLAG", False) + +graph_params = { + "max_string_length": 3600, +} + +generation_params = { + "max_new_tokens": 1024, + "top_k": 10, + "top_p": 0.95, + "temperature": 0.01, + "repetition_penalty": 1.03, + "streaming": True, +} + + +class Input(BaseModel): + input_text: str + task: str + cypher_cmd: str + + +@OpeaComponentRegistry.register("OPEA_STRUCT2GRAPH") +class OpeaStruct2Graph(OpeaComponent): + """A specialized text to graph triplet converter.""" + + def __init__(self, name: str, description: str, config: dict = None): + super().__init__(name, ServiceType.STRUCT2GRAPH.name.lower(), description, config) + self.db = self.__initialize_db() + health_status = self.check_health() + if not health_status: + logger.error("OpeaStruct2Graph health check failed.") + + def __initialize_db(self): + """Initialize the graph database connection and return it.""" + logger.info("Initializing graph database...") + return PrepareGraphDB() + + async def check_health(self) -> bool: + """Checks the 
health of connection to the neo4j service. + + Returns: + bool: True if the service is reachable and healthy, False otherwise. + """ + try: + logger.info("Performing health check...") + response = requests.get("http://localhost:7474", timeout=5) + if response.status_code == 200: + return True + else: + logger.error(f"Health check failed with status code: {response.status_code}") + return False + except Exception as e: + logger.error(f"Health check failed: {str(e)}") + return False + + async def invoke(self, input: Input) -> dict: + """Invokes the struct2graph service to generate graph(s) for the provided input. + + Args: + input: Input object containing: + - input_text: text document + - task: Query or Index + - cypher_cmd: CSV or JSON command + + Returns: + dict: Result of the operation + """ + logger.info("Starting struct2graph operation...") + logger.debug(f"Received input: {input}") + + if input.task == "Query": + logger.info("Executing query operation") + graph_store = self.db.neo4j_link() + result = graph_store.query(input.input_text) + logger.info("Query executed successfully") + + elif input.task == "Index": + logger.info("Executing index operation") + graph_store = self.db.prepare_insert_graphdb(cypher_cmd=input.cypher_cmd) + result = "Done indexing" + logger.info("Indexing completed successfully") + + else: + logger.error(f"Unsupported task type: {input.task}") + raise ValueError(f"Unsupported task type: {input.task}") + + logger.info("Operation completed successfully") + return result diff --git a/comps/struct2graph/src/opea_struct2graph_microservice.py b/comps/struct2graph/src/opea_struct2graph_microservice.py new file mode 100644 index 0000000000..dfbc93f13f --- /dev/null +++ b/comps/struct2graph/src/opea_struct2graph_microservice.py @@ -0,0 +1,54 @@ +# Copyright (C) 2025 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +import os +import pathlib +import sys + +from fastapi.exceptions import HTTPException + +from comps import CustomLogger, 
OpeaComponentLoader, opea_microservices, register_microservice +from comps.struct2graph.src.integrations.opea import Input, OpeaStruct2Graph + +cur_path = pathlib.Path(__file__).parent.resolve() +comps_path = os.path.join(cur_path, "../../../") +sys.path.append(comps_path) + +logger = CustomLogger("struct2graph") +logflag = os.getenv("LOGFLAG", False) + +struct2graph_component_name = os.getenv("STRUCT2GRAPH_COMPONENT_NAME", "OPEA_STRUCT2GRAPH") + +# Initialize OpeaComponentLoader +loader = OpeaComponentLoader( + struct2graph_component_name, + description=f"OPEA struct2graph Component: {struct2graph_component_name}", +) + + +@register_microservice( + name="opea_service@struct2graph", + endpoint="/v1/struct2graph", + host="0.0.0.0", + port=os.getenv("STRUCT2GRAPH_PORT"), +) +async def execute_agent(input: Input): + """Execute triplet extraction from text file. + This function takes an Input object containing the input text and database connection information. + It uses the execute function from the struct2graph module to execute the graph query and returns the result. 
+ + Args: + input (Input): An Input object with the input text + task (Input): type of task to perform index or query + + Returns: + dict: A dictionary with head, tail and type linking head and tail + """ + results = await loader.invoke(input) + logger.info(f"PASSING BACK {results}") + return {"result": results} + + +if __name__ == "__main__": + logger.info("OPEA Struct2Graph Microservice is starting...") + opea_microservices["opea_service@struct2graph"].start() diff --git a/comps/struct2graph/src/requirements.txt b/comps/struct2graph/src/requirements.txt new file mode 100644 index 0000000000..9a4eeb3dc0 --- /dev/null +++ b/comps/struct2graph/src/requirements.txt @@ -0,0 +1,19 @@ +docarray[full] +fastapi +hanging_threads +langchain +langchain-community +langchain-neo4j +neo4j +numpy +opentelemetry-api +opentelemetry-exporter-otlp +opentelemetry-sdk +pandas +prometheus_fastapi_instrumentator +psycopg2-binary +pyarrow +pydantic +shortuuid +sqlalchemy +uvicorn diff --git a/tests/struct2graph/data/test1.csv b/tests/struct2graph/data/test1.csv new file mode 100755 index 0000000000..5bf1c78dab --- /dev/null +++ b/tests/struct2graph/data/test1.csv @@ -0,0 +1,11 @@ +ID,Name,Age,City +1,Alice,25,New York +2,Bob,30,Los Angeles +3,Charlie,35,Chicago +4,Diana,28,Houston +5,Eve,22,Phoenix +6,Frank,40,Philadelphia +7,Grace,29,San Antonio +8,Hank,33,San Diego +9,Ivy,26,Dallas +10,Jack,31,San Jose diff --git a/tests/struct2graph/data/test1.json b/tests/struct2graph/data/test1.json new file mode 100755 index 0000000000..2b814f0f65 --- /dev/null +++ b/tests/struct2graph/data/test1.json @@ -0,0 +1,64 @@ +{ + "table": [ + { + "ID": 1, + "Name": "Alice", + "Age": 25, + "City": "New York" + }, + { + "ID": 2, + "Name": "Bob", + "Age": 30, + "City": "Los Angeles" + }, + { + "ID": 3, + "Name": "Charlie", + "Age": 35, + "City": "Chicago" + }, + { + "ID": 4, + "Name": "Diana", + "Age": 28, + "City": "Houston" + }, + { + "ID": 5, + "Name": "Eve", + "Age": 22, + "City": "Phoenix" + }, + { + 
"ID": 6, + "Name": "Frank", + "Age": 40, + "City": "Philadelphia" + }, + { + "ID": 7, + "Name": "Grace", + "Age": 29, + "City": "San Antonio" + }, + { + "ID": 8, + "Name": "Hank", + "Age": 33, + "City": "San Diego" + }, + { + "ID": 9, + "Name": "Ivy", + "Age": 26, + "City": "Dallas" + }, + { + "ID": 10, + "Name": "Jack", + "Age": 31, + "City": "San Jose" + } + ] +} diff --git a/tests/struct2graph/example_from_file.py b/tests/struct2graph/example_from_file.py new file mode 100644 index 0000000000..3d9a78523a --- /dev/null +++ b/tests/struct2graph/example_from_file.py @@ -0,0 +1,103 @@ +# Copyright (C) 2025 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +import os +import subprocess +import sys +from typing import Literal +from urllib.parse import quote + +import requests + +print("----------- Extract graph from CSV -----------") + +print("----------- Generating index ----------------") +os.environ["LOAD_FORMAT"] = "CSV" +load_format = os.getenv("LOAD_FORMAT") +CYPHER_CSV_CMD = "LOAD CSV WITH HEADERS FROM 'file:////test1.csv' AS row \ + CREATE (:Person {ID: toInteger(row.ID), Name: row.Name, Age: toInteger(row.Age), City: row.City});" + +print(f" CYPHER COMMAND USED:: {CYPHER_CSV_CMD} ") +STRUCT2GRAPH_PORT = os.getenv("STRUCT2GRAPH_PORT") +url = f"http://localhost:{STRUCT2GRAPH_PORT}/v1/struct2graph" +headers = {"accept": "application/json", "Content-Type": "application/json"} + +payload = {"input_text": "", "task": "Index", "cypher_cmd": CYPHER_CSV_CMD} + +try: + # Send the POST request + response = requests.post(url, headers=headers, json=payload) + response.raise_for_status() # Raise an exception for bad status codes + print("Request successful:", response.json()) +except requests.exceptions.RequestException as e: + print("Request failed:", e) + +print("----------- Loading graph completed Query ----------------") +print("----------- Issuing Query --------------------------------") +payload = {"input_text": "MATCH (p:Person {Name:'Alice'}) RETURN p", 
"task": "Query", "cypher_cmd": ""} + +try: + # Send the POST request + response = requests.post(url, headers=headers, json=payload) + response.raise_for_status() # Raise an exception for bad status codes + print("Request successful:", response.json()) +except requests.exceptions.RequestException as e: + print("Request failed:", e) + + +print("----------- Extract graph from JSON -----------") + +print("----------- Generating index ----------------") + +os.environ["LOAD_FORMAT"] = "JSON" +load_format = os.getenv("LOAD_FORMAT") +CYPHER_JSON_CMD = " \ +CALL apoc.load.json('file:///test1.json') YIELD value \ +UNWIND value.table AS row \ +CREATE (:Person { \ + ID: row.ID, \ + Name: row.Name, \ + Age: row.Age, \ + City: row.City \ + }); \ + " + +print(f" CYPHER COMMAND USED:: {CYPHER_JSON_CMD}") + +url = f"http://localhost:{STRUCT2GRAPH_PORT}/v1/struct2graph" +headers = {"accept": "application/json", "Content-Type": "application/json"} + +payload = {"input_text": "", "task": "Index", "cypher_cmd": CYPHER_JSON_CMD} + +try: + # Send the POST request + response = requests.post(url, headers=headers, json=payload) + response.raise_for_status() # Raise an exception for bad status codes + print("Request successful:", response.json()) +except requests.exceptions.RequestException as e: + print("Request failed:", e) + + +print("----------- Loading graph completed Query ----------------") +print("----------- Issuing Query ----------------") +payload = {"input_text": "MATCH (n) RETURN n", "task": "Query", "cypher_cmd": ""} + +try: + # Send the POST request + response = requests.post(url, headers=headers, json=payload) + response.raise_for_status() # Raise an exception for bad status codes + print("Request successful:", response.json()) +except requests.exceptions.RequestException as e: + print("Request failed:", e) + + +print("----------- Issuing Query ----------------") +payload = {"input_text": "MATCH (p:Person {Name:'Alice'}) RETURN p", "task": "Query", "cypher_cmd": 
""} + +print(f" Issuing query {payload}") + +response = requests.post(url, headers=headers, json=payload) +print(f"RESULT : {response}") +# Get response details +print(f"Status Code: {response.status_code}") +print(f"Response Body: {response.json()}") diff --git a/tests/struct2graph/test_struct2graph_opea.sh b/tests/struct2graph/test_struct2graph_opea.sh new file mode 100644 index 0000000000..a4b1ee923f --- /dev/null +++ b/tests/struct2graph/test_struct2graph_opea.sh @@ -0,0 +1,64 @@ +#!/bin/bash +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +WORKPATH=$(git rev-parse --show-toplevel) +echo $WORKPATH +TAG='latest' +LOG_PATH="$WORKPATH/comps/struct2graph/deployment/docker_compose" +source $WORKPATH/comps/struct2graph/src/environment_setup.sh +STRUCT2GRAPH_PORT=8090 +ip_address=$(hostname -I | awk '{print $1}') +service_name="struct2graph" + +function build_docker_graph() { + echo "=================== START BUILD DOCKER ========================" + cd $WORKPATH + echo $(pwd) + docker build --no-cache -t opea/struct2graph:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/struct2graph/src/Dockerfile . + if [ $? 
-ne 0 ]; then + echo "opea/struct2graph built fail" + exit 1 + else + echo "opea/struct2graph built successful" + fi + echo "=================== END BUILD DOCKER ========================" +} + +function start_service() { + echo "=================== START SERVICE ========================" + cd $LOG_PATH + docker compose -f struct2graph-compose.yaml up ${service_name} -d > ${LOG_PATH}/start_services_with_compose.log + + sleep 10s + echo "=================== END SERVICE ========================" +} + +function validate_microservice() { + echo "=================== START VALIDATE ========================" + cd $WORKPATH/tests/struct2graph + python example_from_file.py + echo "=================== END VALIDATE ========================" +} + +function stop_docker() { + echo "=================== START STOP DOCKER ========================" + cd $LOG_PATH + docker compose -f struct2graph-compose.yaml down --remove-orphans + echo "=================== END STOP DOCKER ========================" +} + +function main() { + + stop_docker + + build_docker_graph + start_service + validate_microservice + + stop_docker + echo y | docker system prune + +} + +main