Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

This PR fixes devika #603

Open
wants to merge 23 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 11 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 7 additions & 3 deletions src/agents/coder/coder.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,10 @@ def validate_response(self, response: str) -> Union[List[Dict[str, str]], bool]:
if line.startswith("File: "):
if current_file and current_code:
result.append({"file": current_file, "code": "\n".join(current_code)})
current_file = line.split(":")[1].strip()
if "`" in line:
current_file = line.split("`")[1].strip()
else:
return False
current_code = []
code_block = False
elif line.startswith("```"):
Expand All @@ -71,10 +74,11 @@ def save_code_to_project(self, response: List[Dict[str, str]], project_name: str

for file in response:
file_path = os.path.join(self.project_dir, project_name, file['file'])
file_path_dir = os.path.dirname(file_path)
file_norm_path = os.path.normpath(file_path)
file_path_dir = os.path.dirname(file_norm_path)
os.makedirs(file_path_dir, exist_ok=True)

with open(file_path, "w", encoding="utf-8") as f:
with open(file_norm_path, "w+", encoding="utf-8") as f:
f.write(file["code"])

return file_path_dir
Expand Down
5 changes: 3 additions & 2 deletions src/agents/feature/feature.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,10 +69,11 @@ def save_code_to_project(self, response: List[Dict[str, str]], project_name: str

for file in response:
file_path = os.path.join(self.project_dir, project_name, file['file'])
file_path_dir = os.path.dirname(file_path)
file_norm_path = os.path.normpath(file_path)
file_path_dir = os.path.dirname(file_norm_path)
os.makedirs(file_path_dir, exist_ok=True)

with open(file_path, "w", encoding="utf-8") as f:
with open(file_norm_path, "w+", encoding="utf-8") as f:
f.write(file["code"])

return file_path_dir
Expand Down
2 changes: 1 addition & 1 deletion src/llm/claude_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ def inference(self, model_id: str, prompt: str) -> str:
}
],
model=model_id,
temperature=0
temperature=1
)

return message.content[0].text
2 changes: 1 addition & 1 deletion src/llm/gemini_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ def __init__(self):
genai.configure(api_key=api_key)

def inference(self, model_id: str, prompt: str) -> str:
config = genai.GenerationConfig(temperature=0)
config = genai.GenerationConfig(temperature=1)
model = genai.GenerativeModel(model_id, generation_config=config)
# Set safety settings for the request
safety_settings = {
Expand Down
2 changes: 1 addition & 1 deletion src/llm/groq_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ def inference(self, model_id: str, prompt: str) -> str:
}
],
model=model_id,
temperature=0
temperature=1
)

return chat_completion.choices[0].message.content
2 changes: 1 addition & 1 deletion src/llm/mistral_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,6 @@ def inference(self, model_id: str, prompt: str) -> str:
messages=[
ChatMessage(role="user", content=prompt.strip())
],
temperature=0
temperature=1
)
return chat_completion.choices[0].message.content
2 changes: 1 addition & 1 deletion src/llm/ollama_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,6 @@ def inference(self, model_id: str, prompt: str) -> str:
response = self.client.generate(
model=model_id,
prompt=prompt.strip(),
options={"temperature": 0}
options={"temperature": 1}
)
return response['response']
2 changes: 1 addition & 1 deletion src/llm/openai_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,6 @@ def inference(self, model_id: str, prompt: str) -> str:
}
],
model=model_id,
temperature=0
temperature=1
)
return chat_completion.choices[0].message.content
19 changes: 18 additions & 1 deletion src/project.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,13 +36,30 @@ def create_project(self, project: str):
project_state = Projects(project=project, message_stack_json=json.dumps([]))
session.add(project_state)
session.commit()
# Create project directory
project_dir = os.path.join(self.project_path, project)
os.makedirs(project_dir, exist_ok=True)

def delete_project(self, project: str):
with Session(self.engine) as session:
project_state = session.query(Projects).filter(Projects.project == project).first()
project_state = session.query(Projects).filter_by(project=project).first()
if project_state:
session.delete(project_state)
session.commit()
# Delete project directory
project_dir = os.path.join(self.project_path, project)
if os.path.exists(project_dir):
# Empty the directory
for root, dirs, files in os.walk(project_dir, topdown=False):
for file in files:
file_path = os.path.join(root, file)
os.remove(file_path)
for dir1 in dirs:
dir_path = os.path.join(root, dir1)
os.rmdir(dir_path)

# Remove the empty directory
os.rmdir(project_dir)

def add_message_to_project(self, project: str, message: dict):
with Session(self.engine) as session:
Expand Down
5 changes: 4 additions & 1 deletion src/state.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,9 @@ def update_latest_state(self, project: str, state: dict):
emit_agent("agent-state", state_stack)

def get_latest_state(self, project: str):
if not project:
# If no project is selected, return None immediately
return None
with Session(self.engine) as session:
agent_state = session.query(AgentStateModel).filter(AgentStateModel.project == project).first()
if agent_state:
Expand Down Expand Up @@ -174,4 +177,4 @@ def get_latest_token_usage(self, project: str):
if agent_state:
return json.loads(agent_state.state_stack_json)[-1]["token_usage"]
return 0