Skip to content

Commit

Permalink
fix(lint): critical bug with pylint usage (apache#131)
Browse files Browse the repository at this point in the history
The previous pylint configuration had issues, which prevented pylint from being triggered properly. 

This PR has modified the pylint configuration to fix the lingering issues and fixed residual pylint issues (ML LLM)

Details are as follows:
- fixed incubator-hugegraph-ai/style/code_format_and_analysis.sh
- fixed incubator-hugegraph-ai/hugegraph-ml/src/hugegraph_ml
- fixed incubator-hugegraph-ai/hugegraph-llm/src/hugegraph_llm


---------

Co-authored-by: root <[email protected]>
Co-authored-by: imbajin <[email protected]>
  • Loading branch information
3 people authored Dec 9, 2024
1 parent 672aeef commit 71b6261
Show file tree
Hide file tree
Showing 27 changed files with 140 additions and 135 deletions.
3 changes: 3 additions & 0 deletions .github/workflows/pylint.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,9 @@ jobs:
pip install -r ./hugegraph-llm/requirements.txt
pip install -r ./hugegraph-ml/requirements.txt
pip install -r ./hugegraph-python-client/requirements.txt
- name: Check DGL version
run: |
python -c "import dgl; print(dgl.__version__)"
- name: Analysing the code with pylint
run: |
bash ./style/code_format_and_analysis.sh -p
11 changes: 5 additions & 6 deletions hugegraph-llm/src/hugegraph_llm/api/admin_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,14 +23,13 @@
from hugegraph_llm.api.models.rag_requests import LogStreamRequest
from hugegraph_llm.api.models.rag_response import RAGResponse


# FIXME: line 31: E0702: Raising dict while only classes or instances are allowed (raising-bad-type)
def admin_http_api(router: APIRouter, log_stream):
@router.post("/logs", status_code=status.HTTP_200_OK)
async def log_stream_api(req: LogStreamRequest):
if os.getenv('ADMIN_TOKEN') != req.admin_token:
raise generate_response(RAGResponse(status_code=status.HTTP_403_FORBIDDEN, message="Invalid admin_token"))
else:
log_path = os.path.join("logs", req.log_file)
raise generate_response(RAGResponse(status_code=status.HTTP_403_FORBIDDEN, message="Invalid admin_token")) #pylint: disable=E0702
log_path = os.path.join("logs", req.log_file)

# Create a StreamingResponse that reads from the log stream generator
return StreamingResponse(log_stream(log_path), media_type="text/plain")
# Create a StreamingResponse that reads from the log stream generator
return StreamingResponse(log_stream(log_path), media_type="text/plain")
2 changes: 1 addition & 1 deletion hugegraph-llm/src/hugegraph_llm/config/config_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -246,7 +246,7 @@ class PromptData:
文本:
{question}
"""

#pylint: disable=C0301
# keywords_extract_prompt_EN = """
# Instruction:
# Please perform the following tasks on the text below:
Expand Down
45 changes: 22 additions & 23 deletions hugegraph-llm/src/hugegraph_llm/demo/rag_demo/admin_block.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ async def log_stream(log_path: str, lines: int = 125):
Stream the content of a log file like `tail -f`.
"""
try:
with open(log_path, 'r') as file:
with open(log_path, 'r', encoding='utf-8') as file:
buffer = deque(file, maxlen=lines)
for line in buffer:
yield line # Yield the initial lines
Expand All @@ -40,32 +40,32 @@ async def log_stream(log_path: str, lines: int = 125):
yield line
else:
await asyncio.sleep(0.1) # Non-blocking sleep
except FileNotFoundError:
raise Exception(f"Log file not found: {log_path}")
except FileNotFoundError as exc:
raise Exception(f"Log file not found: {log_path}") from exc
except Exception as e:
raise Exception(f"An error occurred while reading the log: {str(e)}")
raise Exception(f"An error occurred while reading the log: {str(e)}") from e


# Functions to read each log file
def read_llm_server_log(lines=250):
log_path = "logs/llm-server.log"
try:
with open(log_path, "r") as f:
with open(log_path, "r", encoding='utf-8') as f:
return ''.join(deque(f, maxlen=lines))
except FileNotFoundError:
log.critical(f"Log file not found: {log_path}")
log.critical("Log file not found: %s", log_path)
return "LLM Server log file not found."


# Functions to clear each log file
def clear_llm_server_log():
log_path = "logs/llm-server.log"
try:
with open(log_path, "w") as f:
with open(log_path, "w", encoding='utf-8') as f:
f.truncate(0) # Clear the contents of the file
return "LLM Server log cleared."
except Exception as e:
log.error(f"An error occurred while clearing the log: {str(e)}")
except Exception as e: #pylint: disable=W0718
log.error("An error occurred while clearing the log: %s", str(e))
return "Failed to clear LLM Server log."


Expand All @@ -78,24 +78,23 @@ def check_password(password, request: Request = None):
# Return logs and update visibility
llm_log = read_llm_server_log()
# Log the successful access with the IP address
log.info(f"Logs accessed successfully from IP: {client_ip}")
log.info("Logs accessed successfully from IP: %s", client_ip)
return (
llm_log,
gr.update(visible=True),
gr.update(visible=True),
gr.update(visible=True),
gr.update(visible=False)
)
else:
# Log the failed attempt with IP address
log.error(f"Incorrect password attempt from IP: {client_ip}")
return (
"",
gr.update(visible=False),
gr.update(visible=False),
gr.update(visible=False),
gr.update(value="Incorrect password. Access denied.", visible=True)
)
# Log the failed attempt with IP address
log.error("Incorrect password attempt from IP: %s", client_ip)
return (
"",
gr.update(visible=False),
gr.update(visible=False),
gr.update(visible=False),
gr.update(value="Incorrect password. Access denied.", visible=True)
)


def create_admin_block():
Expand Down Expand Up @@ -141,22 +140,22 @@ def create_admin_block():
variant="primary")

# Define what happens when the password is submitted
submit_button.click(
submit_button.click( #pylint: disable=E1101
fn=check_password,
inputs=[password_input],
outputs=[llm_server_log_output, hidden_row, clear_llm_server_button,
refresh_llm_server_button, error_message],
)

# Define what happens when the Clear LLM Server Log button is clicked
clear_llm_server_button.click(
clear_llm_server_button.click( #pylint: disable=E1101
fn=clear_llm_server_log,
inputs=[],
outputs=[llm_server_log_output],
)

# Define what happens when the Refresh LLM Server Log button is clicked
refresh_llm_server_button.click(
refresh_llm_server_button.click( #pylint: disable=E1101
fn=read_llm_server_log,
inputs=[],
outputs=[llm_server_log_output],
Expand Down
6 changes: 3 additions & 3 deletions hugegraph-llm/src/hugegraph_llm/demo/rag_demo/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ def authenticate(credentials: HTTPAuthorizationCredentials = Depends(sec)):
headers={"WWW-Authenticate": "Bearer"},
)


# pylint: disable=C0301
def init_rag_ui() -> gr.Interface:
with gr.Blocks(
theme="default",
Expand All @@ -67,7 +67,7 @@ def init_rag_ui() -> gr.Interface:
"""
TODO: leave a general idea of the unresolved part
graph_config_input = textbox_array_graph_config
= [settings.graph_ip, settings.graph_port, settings.graph_name, graph_user, settings.graph_pwd, settings.graph_space]
= [settings.graph_ip, settings.graph_port, settings.graph_name, graph_user, settings.graph_pwd, settings.graph_space]
llm_config_input = textbox_array_llm_config
= if settings.llm_type == openai [settings.openai_api_key, settings.openai_api_base, settings.openai_language_model, settings.openai_max_tokens]
Expand Down Expand Up @@ -107,7 +107,7 @@ def refresh_ui_config_prompt() -> tuple:
prompt.default_question, prompt.answer_prompt, prompt.keywords_extract_prompt
)

hugegraph_llm_ui.load(fn=refresh_ui_config_prompt, outputs=[
hugegraph_llm_ui.load(fn=refresh_ui_config_prompt, outputs=[ #pylint: disable=E1101
textbox_array_graph_config[0],
textbox_array_graph_config[1],
textbox_array_graph_config[2],
Expand Down
Loading

0 comments on commit 71b6261

Please sign in to comment.