162 examples

Missing error handling

Errors occur without proper handling, leading to crashes, silent failures, or unpredictable behavior.

[ FAQ1 ]

What is missing error handling?

Missing error handling occurs when a program fails to anticipate and manage errors or exceptional situations that can arise during execution. Without proper handling—such as using try-catch blocks or managing promise rejections—errors can propagate unchecked, causing applications to crash, fail silently, or behave unpredictably. Poor or absent error handling also hinders debugging, troubleshooting, and stability by obscuring underlying issues and preventing graceful recovery from error conditions.
[ FAQ2 ]

How to fix missing error handling in code

To fix missing error handling, first identify the operations that can fail — I/O, network requests, parsing, and external API calls are the most common. Wrap synchronous failure-prone code in try-catch blocks, and handle asynchronous failures by awaiting promises inside try-catch or attaching explicit catch handlers. Validate responses before using them (for example, checking `response.ok` after a fetch and verifying parsed data has the expected shape), and provide a meaningful fallback or a descriptive rethrown error rather than swallowing the failure silently. Finally, surface errors to the user or logs with enough context to diagnose the underlying cause, so the application can recover gracefully instead of crashing.
diff block
);
}
-export async function saveToCache(
- request: HeliconeProxyRequest,
- response: Response,
- responseBody: string[],
- cacheControl: string,
- settings: { bucketSize: number },
- cacheKv: KVNamespace,
- cacheSeed: string | null
-): Promise<void> {
- const expirationTtl = cacheControl.includes("max-age=")
- ? parseInt(cacheControl.split("max-age=")[1])
- : 0;
- const { freeIndexes } = await getMaxCachedResponses(
- request,
- settings,
- cacheKv,
- cacheSeed
- );
- if (freeIndexes.length > 0) {
- await cacheKv.put(
- await kvKeyFromRequest(request, freeIndexes[0], cacheSeed),
- JSON.stringify({
- headers: Object.fromEntries(response.headers.entries()),
- body: responseBody,
- }),
- {
- expirationTtl,
- }
+interface SaveToCacheOptions {
+ request: HeliconeProxyRequest;
+ response: Response;
+ responseBody: string[];
+ cacheControl: string;
+ settings: { bucketSize: number };
+ cacheKv: KVNamespace;
+ cacheSeed: string | null;
+}
+
+async function trySaveToCache(options: SaveToCacheOptions) {
+ try {
+ const {
+ request,
+ response,
+ responseBody,
+ cacheControl,
+ settings,
+ cacheKv,
+ cacheSeed,
+ } = options;
+ const expirationTtl = cacheControl.includes("max-age=")
+ ? parseInt(cacheControl.split("max-age=")[1])
+ : 0;
Greptile
greptile
logic: Missing error handling for invalid max-age format in Cache-Control header
suggested fix
const expirationTtl = cacheControl.includes("max-age=")
+ ? Number.isNaN(parseInt(cacheControl.split("max-age=")[1])) ? 0 : parseInt(cacheControl.split("max-age=")[1])
: 0;
diff block
+import { getZpiHeaders, ZPI_URL } from "../shared/api";
+
+interface Input {
+ /**
+ * token address on Ethereum
+ * required parameter
+ * example: 0x6b175474e89094c44da98b954eedeac495271d0f
+ */
+ tokenId: string;
+}
+
+export default async function (input: Input) {
+ const response = await fetch(`${ZPI_URL}asset/get-fungible-full-info/v1?fungibleId=${input.tokenId}&currency=usd`, {
+ headers: getZpiHeaders(),
+ });
+ const result = await response.json();
+ return result.data;
Greptile
greptile
logic: Missing error handling for failed API requests. Should wrap in try/catch and handle network errors. ```suggestion export default async function (input: Input) { + try { const response = await fetch(`${ZPI_URL}asset/get-fungible-full-info/v1?fungibleId=${input.tokenId}&currency=usd`, { headers: getZpiHeaders(), }); const result = await response.json(); return result.data; + } catch (error) { + throw new Error(`Failed to fetch token info: ${error.message}`); } ```
diff block
# XXX: return mtime?
return PackagePushResult(top_hash=TopHash(top_hash))
+
+
+class PackagerEvent(pydantic.BaseModel):
+ source_prefix: str
+ registry: str | None = None
+ package_name: str | None = None
+ metadata: dict[str, T.Any] | None = None
+ metadata_uri: str | None = None
+ workflow: str | None = None
+ commit_message: str | None = None
+
+ @pydantic.root_validator
+ def validate_metadata(cls, values):
+ metadata, metadata_uri = values["metadata"], values["metadata_uri"]
+ if metadata is not None and metadata_uri is not None:
+ raise ValueError("metadata and metadata_uri are mutually exclusive")
+ return values
+
+ def get_source_prefix_pk(self) -> PhysicalKey:
+ pk = PhysicalKey.from_url(self.source_prefix)
+ assert not pk.is_local() # XXX: error handling
+ return PhysicalKey(
+ pk.bucket,
+ pk.path if pk.path.endswith("/") or not pk.path else pk.path.rsplit("/", 1)[0] + "/",
+ None,
+ )
+
+ def get_metadata_uri_pk(self) -> PhysicalKey | None:
+ if self.metadata_uri is None:
+ return None
+ pk = PhysicalKey.from_url(rfc3986.uri_reference(self.metadata_uri).resolve_with(self.source_prefix).unsplit())
+ assert not pk.is_local() # XXX: error handling
+ return pk
+
+ # XXX: copied from shared
+ # XXX: is this sane here?
+ @property
+ def workflow_normalized(self):
+ # use default
+ if self.workflow is None:
+ return ...
+
+ # not selected
+ if self.workflow == "":
+ return None
+
+ return self.workflow
+
+
+def infer_pkg_name_from_prefix(prefix: str) -> str:
+ # XXX: check defaults are sane
+ default_prefix = "quilt-packager"
+ default_suffix = "pkg"
+
+ parts = [re.sub(r"[^\w-]", "-", p) for p in prefix.split("/") if p]
+ parts = ["_".join(parts[:-1]) or default_prefix, parts[-1] if parts else default_suffix]
+ return "/".join(parts)
+
+
+# XXX is this sane?
+@functools.cache
+def setup_user_boto_session_from_default():
+ global user_boto_session
+ user_boto_session = get_user_boto_session()
+
+
+def get_scratch_buckets() -> T.Dict[str, str]:
+ return json.load(s3.get_object(Bucket=SERVICE_BUCKET, Key="scratch-buckets.json")["Body"])
+
+
+def package_prefix_sqs(event, context):
+ import pprint
+
+ pprint.pprint(event)
+
+ assert len(event["Records"]) == 1 # XXX: does it really make sense to refuse processing multiple records?
+
+ for record in event["Records"]:
+ package_prefix(record["body"], context)
+
+
+def package_prefix(event, context):
+ params = PackagerEvent.parse_raw(event)
+
+ prefix_pk = params.get_source_prefix_pk()
+
+ pkg_name = infer_pkg_name_from_prefix(prefix_pk.path) if params.package_name is None else params.package_name
+
+ dst_bucket = params.registry or prefix_pk.bucket
+ registry_url = f"s3://{dst_bucket}"
+ package_registry = get_package_registry(registry_url)
+
+ metadata = params.metadata
+ if metadata_uri_pk := params.get_metadata_uri_pk():
+ metadata = json.load(s3.get_object(**S3ObjectSource.from_pk(metadata_uri_pk).boto_args)["Body"])
+ assert isinstance(metadata, dict) # XXX: does this make sense?
+
+ setup_user_boto_session_from_default()
+
+ pkg = quilt3.Package()
+ pkg.set_dir(".", str(prefix_pk), meta=metadata)
+ # TODO: check while listing objects
+ size_to_hash = 0
+ for i, (_, pkg_entry) in enumerate(pkg.walk()):
+ if i > MAX_FILES_TO_HASH:
+ raise PkgpushException(
+ "TooManyFilesToHash",
+ {
+ "num_files": i,
+ "max_files": MAX_FILES_TO_HASH,
+ },
+ )
+ assert isinstance(pkg_entry.size, int)
+ size_to_hash += pkg_entry.size
+ if size_to_hash > MAX_BYTES_TO_HASH:
+ raise PkgpushException(
+ "PackageTooLargeToHash",
+ {
+ "size": size_to_hash,
+ "max_size": MAX_BYTES_TO_HASH,
+ },
+ )
+ pkg._validate_with_workflow(
+ registry=package_registry,
+ workflow=params.workflow_normalized,
+ name=pkg_name,
+ message=params.commit_message,
+ )
+ calculate_pkg_hashes(pkg, get_scratch_buckets())
+ pkg._build(
+ name=pkg_name,
+ registry=registry_url,
+ message=params.commit_message,
+ )
Greptile
greptile
logic: missing error handling and return value validation for pkg._build()
diff block
+import { Action, ActionPanel, Icon, List, useNavigation } from "@raycast/api";
+import { useState } from "react";
+import { DestructiveAction, PinAction } from "./actions";
+import { PreferencesActionSection } from "./actions/preferences";
+import { DEFAULT_MODEL, useModel } from "./hooks/useModel";
+import { Model as ModelType } from "./type";
+import { ModelForm } from "./views/model/form";
+import { ModelListItem, ModelListView } from "./views/model/list";
+import { ExportData, ImportData } from "./utils/import-export";
+import { ImportForm } from "./views/import-form";
+import { COMMAND_MODEL_PREFIX } from "./hooks/useCommand";
+
+export default function Model() {
+ const models = useModel();
+ const [searchText, setSearchText] = useState<string>("");
+ const [selectedModelId, setSelectedModelId] = useState<string | null>(null);
+
+ const { push } = useNavigation();
+
+ const getActionPanel = (model: ModelType) => (
+ <ActionPanel>
+ {!model.id.startsWith(COMMAND_MODEL_PREFIX) && (
+ <Action
+ title={"Edit Model"}
+ shortcut={{ modifiers: ["cmd"], key: "e" }}
+ icon={Icon.Text}
+ onAction={() => push(<ModelForm model={model} use={{ models }} />)}
+ />
+ )}
+ <Action
+ title={"Create Model"}
+ shortcut={{ modifiers: ["cmd"], key: "n" }}
+ icon={Icon.Text}
+ onAction={() => push(<ModelForm name={searchText} use={{ models }} />)}
+ />
+ <ActionPanel.Section title="Actions">
+ <Action title={"Export Models"} icon={Icon.Upload} onAction={() => ExportData(models.data, "Models")} />
+ <Action
+ title={"Import Models"}
+ icon={Icon.Download}
+ onAction={() =>
+ push(
+ <ImportForm
+ moduleName="Models"
+ onSubmit={async (file) => {
+ ImportData<ModelType>("models", file).then((data) => {
+ models.setModels(data.reduce((acc, model) => ({ ...acc, [model.id]: model }), {}));
+ });
+ }}
Greptile
greptile
logic: Missing error handling in the ImportData promise. Consider adding a catch block to handle errors and notify the user.
suggested fix
onSubmit={async (file) => {
+ try {
+ const data = await ImportData<ModelType>("models", file);
models.setModels(data.reduce((acc, model) => ({ ...acc, [model.id]: model }), {}));
+ } catch (error) {
+ showFailureToast(error, { title: "Could not import models" });
+ }
}}
diff block
return channel_permissions
+def _get_slack_document_access(
+ cc_pair: ConnectorCredentialPair,
+ channel_permissions: dict[str, ExternalAccess],
+ callback: IndexingHeartbeatInterface | None,
+) -> Generator[DocExternalAccess, None, None]:
+ slack_connector = SlackConnector(**cc_pair.connector.connector_specific_config)
+ slack_connector.load_credentials(cc_pair.credential.credential_json)
+
+ slim_doc_generator = slack_connector.retrieve_all_slim_documents(callback=callback)
+
+ for doc_metadata_batch in slim_doc_generator:
+ for doc_metadata in doc_metadata_batch:
+ if doc_metadata.perm_sync_data is None:
+ continue
+ channel_id = doc_metadata.perm_sync_data["channel_id"]
+ yield DocExternalAccess(
+ external_access=channel_permissions[channel_id],
+ doc_id=doc_metadata.id,
Greptile
greptile
logic: Missing error handling for non-existent channel_id in channel_permissions dict. Could raise KeyError.
suggested fix
channel_id = doc_metadata.perm_sync_data["channel_id"]
+ if channel_id not in channel_permissions:
+ logger.warning(f"Channel {channel_id} not found in permissions map, skipping")
continue
yield DocExternalAccess(
external_access=channel_permissions[channel_id],
doc_id=doc_metadata.id,
diff block
+import { $fetch } from "ofetch";
+import type { ApiResponse } from "../types/modules.ts";
+
+type Input = {
+ /**
+ * The category to filter modules by
+ * @example "ui"
+ * @example "cms"
+ * @example "seo"
+ */
+ category: string;
+};
+
+/**
+ * Fetch Nuxt modules filtered by category
+ *
+ * Use this tool when:
+ * - You need to find modules in a specific category
+ * - You want to recommend modules from a particular category
+ * - You need a more focused list of modules than the complete list
+ *
+ * @returns The list of modules in the specified category
+ */
+export default async function tool(input: Input) {
+ const { modules } = await $fetch<ApiResponse>(`https://api.nuxt.com/modules?category=${input.category}`);
+ return modules;
Greptile
greptile
logic: Missing error handling for failed API requests. Consider wrapping in try-catch and using showFailureToast from @raycast/utils ```suggestion export default async function tool(input: Input) { + try { const { modules } = await $fetch<ApiResponse>(`https://api.nuxt.com/modules?category=${input.category}`); return modules; + } catch (error) { + throw new Error(`Failed to fetch modules: ${error.message}`); + } ```
diff block
}
}
})
+
+export const useRegister = createPostMutationHook({
+ endpoint: RegisterCommand.TSQ_url,
+ bodySchema: RegisterCommand.RequestSchema,
+ responseSchema: RegisterCommand.ResponseSchema,
+ rMutationParams: {
+ onSuccess: (data) => {
+ notifications.show({
+ title: 'Register',
+ message: 'User registered successfully',
+ color: 'green'
+ })
+ setToken({ token: data.accessToken })
+ }
+ }
Greptile
greptile
logic: Missing error handling for registration failures. The login hook has error notifications, but register doesn't handle errors the same way.
suggested fix
export const useRegister = createPostMutationHook({
endpoint: RegisterCommand.TSQ_url,
bodySchema: RegisterCommand.RequestSchema,
responseSchema: RegisterCommand.ResponseSchema,
rMutationParams: {
onSuccess: (data) => {
notifications.show({
title: 'Register',
message: 'User registered successfully',
color: 'green'
})
setToken({ token: data.accessToken })
+ },
+ onError: (error) => {
notifications.show({
title: 'Register',
+ message: error.message,
+ color: 'red'
})
}
}
diff block
}
} catch (error) {
didError = true;
- console.log('\n\n' + error.message);
- console.log(file);
}
});
Greptile
greptile
logic: Missing error handling - script should exit with non-zero status when errors occur to properly signal failure in CI environments
suggested fix
+});
+if (didError) {
+ process.exit(1);
+}
diff block
+import { AI, getPreferenceValues } from "@raycast/api";
+import { fetchPowerFlowRealtimeData } from "../api";
+
+interface Preferences {
+ baseUrl: string;
+}
+
+/**
+ * Analyzes the current state of the Fronius solar system using AI
+ *
+ * This tool fetches real-time data directly from the Fronius inverter API and uses
+ * Raycast's AI capabilities to provide meaningful insights about the system's performance.
+ * The analysis includes information about current production, consumption patterns,
+ * grid interaction, and battery status (if available).
+ *
+ * The tool is designed to help users understand their solar system's current state
+ * at a glance and identify potential optimization opportunities or issues.
+ *
+ * @returns {Promise<string>} A detailed analysis of the system state with insights and recommendations
+ */
+export default async function analyzeSystem(): Promise<string> {
+ // Get the base URL from user preferences
+ const { baseUrl } = getPreferenceValues<Preferences>();
+
+ // Fetch the latest power flow data directly from the Fronius API
+ const powerResponse = await fetchPowerFlowRealtimeData(baseUrl);
+ const site = powerResponse.Body.Data.Site;
Greptile
greptile
logic: Missing error handling for API call. Should wrap in try/catch to handle network failures or invalid responses. ```suggestion + try { const powerResponse = await fetchPowerFlowRealtimeData(baseUrl); const site = powerResponse.Body.Data.Site; + } catch (error) { + throw new Error(`Failed to fetch power flow data: ${error.message}`); } ```
diff block
+import { AI, getPreferenceValues } from "@raycast/api";
+import { fetchPowerFlowRealtimeData } from "../api";
+
+interface Preferences {
+ baseUrl: string;
+}
+
+/**
+ * Provides optimization suggestions for the Fronius solar system
+ *
+ * This tool fetches real-time data from the Fronius API, calculates key performance
+ * metrics, and uses Raycast's AI capabilities to generate practical optimization
+ * suggestions tailored to the current system state.
+ *
+ * The suggestions focus on ways to improve energy efficiency, maximize self-consumption,
+ * optimize battery usage (if present), and enhance overall system performance.
+ * The recommendations are designed to be actionable by homeowners without requiring
+ * technical expertise or professional assistance.
+ *
+ * The tool uses current power production, estimated daily production, peak power,
+ * and grid export percentage to provide context-aware recommendations.
+ *
+ * @returns {Promise<string>} Practical optimization suggestions based on current system performance
+ */
+export default async function optimizationSuggestions(): Promise<string> {
+ // Get the base URL from user preferences
+ const { baseUrl } = getPreferenceValues<Preferences>();
+
+ // Fetch the latest power flow data directly from the Fronius API
+ const powerResponse = await fetchPowerFlowRealtimeData(baseUrl);
+ const site = powerResponse.Body.Data.Site;
Greptile
greptile
logic: Missing error handling for failed API requests. Should wrap in try/catch to handle network failures gracefully. ```suggestion + try { const powerResponse = await fetchPowerFlowRealtimeData(baseUrl); const site = powerResponse.Body.Data.Site; + } catch (error) { + throw new Error(`Failed to fetch power flow data: ${error.message}`); } ```
diff block
},
},
request: {
- url: 'https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart',
+ url: 'https://www.googleapis.com/drive/v3/files',
method: 'POST',
headers: (params) => ({
Authorization: `Bearer ${params.accessToken}`,
- 'Content-Type': 'multipart/related; boundary=boundary',
+ 'Content-Type': 'application/json',
}),
body: (params) => {
const metadata = {
- name: params.fileName,
- ...(params.folderId ? { parents: [params.folderId] } : {}),
+ name: params.fileName, // Important: Always include the filename in metadata
+ mimeType: params.mimeType || 'text/plain',
+ ...(params.folderId && params.folderId.trim() !== '' ? { parents: [params.folderId] } : {}),
}
- const mimeType = params.mimeType || 'text/plain'
+ if (params.folderSelector) {
+ metadata.parents = [params.folderSelector]
+ }
- const body = `--boundary
-Content-Type: application/json; charset=UTF-8
+ return metadata
+ },
+ },
+ transformResponse: async (response: Response, params?: GoogleDriveToolParams) => {
+ try {
+ const data = await response.json()
-${JSON.stringify(metadata)}
+ if (!response.ok) {
+ logger.error('Failed to create file in Google Drive', {
+ status: response.status,
+ statusText: response.statusText,
+ data,
+ })
+ throw new Error(data.error?.message || 'Failed to create file in Google Drive')
+ }
---boundary
-Content-Type: ${mimeType}
+ // Now upload content to the created file
+ const fileId = data.id
+ const requestedMimeType = params?.mimeType || 'text/plain'
+ const authHeader =
+ response.headers.get('Authorization') || `Bearer ${params?.accessToken || ''}`
-${params.content}
---boundary--`
+ // For Google Workspace formats, use the appropriate source MIME type for content upload
+ const uploadMimeType = GOOGLE_WORKSPACE_MIME_TYPES.includes(requestedMimeType)
+ ? SOURCE_MIME_TYPES[requestedMimeType] || 'text/plain'
+ : requestedMimeType
- return { body }
- },
- },
- transformResponse: async (response: Response) => {
- const data = await response.json()
+ logger.info('Uploading content to file', {
+ fileId,
+ fileName: params?.fileName,
+ requestedMimeType,
+ uploadMimeType,
+ })
- if (!response.ok) {
- throw new Error(data.error?.message || 'Failed to upload file to Google Drive')
- }
+ const uploadResponse = await fetch(
+ `https://www.googleapis.com/upload/drive/v3/files/${fileId}?uploadType=media`,
+ {
+ method: 'PATCH',
+ headers: {
+ Authorization: authHeader,
+ 'Content-Type': uploadMimeType,
+ },
+ body: params?.content || '',
+ }
+ )
- return {
- success: true,
- output: {
- file: {
- id: data.id,
- name: data.name,
- mimeType: data.mimeType,
- webViewLink: data.webViewLink,
- webContentLink: data.webContentLink,
- size: data.size,
- createdTime: data.createdTime,
- modifiedTime: data.modifiedTime,
- parents: data.parents,
+ if (!uploadResponse.ok) {
+ const uploadError = await uploadResponse.json()
+ logger.error('Failed to upload content to file', {
+ status: uploadResponse.status,
+ statusText: uploadResponse.statusText,
+ error: uploadError,
+ })
+ throw new Error(uploadError.error?.message || 'Failed to upload content to file')
+ }
+
+ // For Google Workspace documents, update the name again to ensure it sticks after conversion
+ if (GOOGLE_WORKSPACE_MIME_TYPES.includes(requestedMimeType)) {
+ logger.info('Updating file name to ensure it persists after conversion', {
+ fileId,
+ fileName: params?.fileName,
+ })
+
+ const updateNameResponse = await fetch(
+ `https://www.googleapis.com/drive/v3/files/${fileId}`,
+ {
+ method: 'PATCH',
+ headers: {
+ Authorization: authHeader,
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify({
+ name: params?.fileName,
+ }),
+ }
+ )
+
+ if (!updateNameResponse.ok) {
+ logger.warn('Failed to update filename after conversion, but content was uploaded', {
+ status: updateNameResponse.status,
+ statusText: updateNameResponse.statusText,
+ })
+ }
+ }
+
+ // Get the final file data
+ const finalFileResponse = await fetch(
+ `https://www.googleapis.com/drive/v3/files/${fileId}?fields=id,name,mimeType,webViewLink,webContentLink,size,createdTime,modifiedTime,parents`,
+ {
+ headers: {
+ Authorization: authHeader,
+ },
+ }
+ )
Greptile
greptile
logic: missing error handling for finalFileResponse - should check response.ok ```suggestion const finalFileResponse = await fetch( `https://www.googleapis.com/drive/v3/files/${fileId}?fields=id,name,mimeType,webViewLink,webContentLink,size,createdTime,modifiedTime,parents`, { headers: { Authorization: authHeader, }, } ) + if (!finalFileResponse.ok) { + logger.error('Failed to get final file data', { + status: finalFileResponse.status, + statusText: finalFileResponse.statusText, }) + throw new Error('Failed to get final file data') } ```
diff block
+import { Color, Icon, Image, Keyboard, List, Toast, confirmAlert, showToast } from "@raycast/api";
+import { exec } from "child_process";
+import * as sudo from "sudo-prompt";
+
+export default class Disk {
+ number: number;
+ identifier: string;
+ size: string;
+ name: string;
+ details: string;
+ mountStatus: string;
+ type: string;
+ isWhole: boolean;
+ mountPoint: string | null;
+
+ constructor(number: number, type: string, identifier: string, name: string, size: string) {
+ this.number = number;
+ this.identifier = identifier;
+ this.name = name;
+ this.size = size;
+ this.type = type;
+ this.mountPoint = null;
+ this.details = "Initializing...";
+ this.mountStatus = "Initializing...";
+ this.isWhole = false;
+ }
+
+ getActions(postFunction: (type: string) => void): {
+ title: string;
+ shortcut?: Keyboard.Shortcut;
+ icon: Image.ImageLike;
+ onAction: () => void;
+ }[] {
+ const action = (
+ title: string,
+ shortcut: Keyboard.Shortcut,
+ icon: Image.ImageLike,
+ method: "mount" | "unmount" | "eject" | "revealInFinder" | "showDetailCustomTerminal"
+ ) => ({
+ title,
+ shortcut,
+ icon,
+ onAction: () => {
+ this[method]().finally(
+ () =>
+ (method === "mount" || method === "unmount" || method === "eject") &&
+ postFunction(method === "eject" ? "DiskRefresh" : "DiskUpdate")
+ );
+ },
+ });
+
+ const failureAction = (title: string, icon?: Image.ImageLike, message?: string) => ({
+ title,
+ icon: Icon.Warning,
+ onAction: () =>
+ showToast({
+ style: Toast.Style.Failure,
+ title: `${this.identifier} ${title}`,
+ message,
+ }),
+ });
+
+ switch (this.mountStatus) {
+ case "Mounted":
+ return [
+ action("Unmount Volume", { modifiers: ["cmd"], key: "e" }, Icon.Eject, "unmount"),
+ action("Eject Full Drive", { modifiers: ["opt"], key: "e" }, Icon.Eject, "eject"),
+ action("Reveal in Finder", { modifiers: ["cmd"], key: "f" }, Icon.Eye, "revealInFinder"),
+ action("Terminal Info", { modifiers: ["cmd"], key: "t" }, Icon.Info, "showDetailCustomTerminal"),
+ ];
+ case "Unmounted":
+ return [
+ action("Mount Volume", { modifiers: ["cmd"], key: "e" }, Icon.ArrowDown, "mount"),
+ action("Eject Full Drive", { modifiers: ["opt"], key: "e" }, Icon.Eject, "eject"),
+ action("Terminal Info", { modifiers: ["cmd"], key: "t" }, Icon.Info, "showDetailCustomTerminal"),
+ ];
+ case "Whole":
+ return [
+ action("Unmount All Volumes", { modifiers: ["cmd"], key: "e" }, Icon.Eject, "unmount"),
+ action("Mount All Volumes", { modifiers: ["cmd", "shift"], key: "e" }, Icon.ArrowDown, "mount"),
+ action("Eject Drive", { modifiers: ["opt"], key: "e" }, Icon.Eject, "eject"),
+ action("Terminal Info", { modifiers: ["cmd"], key: "t" }, Icon.Info, "showDetailCustomTerminal"),
+ ];
+ case "Unmountable":
+ return [
+ action("Eject", { modifiers: ["opt"], key: "e" }, Icon.Eject, "eject"),
+ action("Terminal Info", { modifiers: ["cmd"], key: "t" }, Icon.Info, "showDetailCustomTerminal"),
+ failureAction("Unmountable"),
+ ];
+ case "Container":
+ return [
+ action("Eject All Volumes", { modifiers: ["opt"], key: "e" }, Icon.Eject, "eject"),
+ action("Terminal Info", { modifiers: ["cmd"], key: "t" }, Icon.Info, "showDetailCustomTerminal"),
+ ];
+ case "Timed Out":
+ return [
+ action("Unmount Disk", { modifiers: ["cmd"], key: "e" }, Icon.Eject, "unmount"),
+ action("Mount Disk", { modifiers: ["cmd", "shift"], key: "e" }, Icon.ArrowDown, "mount"),
+ action("Eject Disk", { modifiers: ["opt"], key: "e" }, Icon.Eject, "eject"),
+ action("Info in Custom Terminal", { modifiers: ["cmd"], key: "t" }, Icon.Info, "showDetailCustomTerminal"),
+ ];
+ default:
+ return [
+ failureAction("Mountability Unknown", "Shouldn't happen. Try reloading or so"),
+ action("Unmount Disk", { modifiers: ["cmd"], key: "e" }, Icon.Eject, "unmount"),
+ action("Mount Disk", { modifiers: ["cmd", "shift"], key: "e" }, Icon.ArrowDown, "mount"),
+ action("Terminal Info", { modifiers: ["cmd"], key: "t" }, Icon.Info, "showDetailCustomTerminal"),
+ ];
+ }
+ }
+
+ async showDetailCustomTerminal() {
+ const command = `diskutil info ${this.identifier}`;
+
+ // Execute AppleScript to open a new Terminal window and run the command
+ const fullCommand = `
+ osascript -e 'tell application "Terminal"
+ activate
+ do script "${command}"
+ delay 1000
+ set frontmost of the first window to true
+ end tell'
+ `;
+
+ showToast({
+ style: Toast.Style.Animated,
+ title: `Opening new terminal...`,
+ });
+ await new Promise((resolve) => setTimeout(resolve, 1000)); // delay
+ showToast({
+ style: Toast.Style.Success,
+ title: `Opened new terminal`,
+ });
+ Disk.execCommand(fullCommand);
Greptile
greptile
logic: Missing error handling for execCommand. This command could fail but the error is not caught.
suggested fix
+ try {
+ await Disk.execCommand(fullCommand);
+ } catch (error) {
showToast({
style: Toast.Style.Failure,
+ title: "Failed to open terminal",
+ message: String(error)
});
}
diff block
import { Command } from 'commander';
+import { existsSync } from 'fs';
+import { mkdir, writeFile } from 'fs/promises';
+import prompts from 'prompts';
+import { execSync } from 'child_process';
+import path from 'path';
+
+const DEV_DEPENDENCIES = ['tailwindcss', '@tailwindcss/postcss'];
+const DEPENDENCIES = [
+ '@phosphor-icons/react',
+ '@radix-ui/react-checkbox',
+ '@radix-ui/react-icons',
+ '@radix-ui/themes',
+ 'phosphor-react',
+ 'radix-ui',
+];
export const init = new Command()
.command('init')
.description('Initialize a new project')
- .action(() => {
- console.log('Initializing the project');
+ .action(async () => {
+ const { root } = await prompts([
+ {
+ type: 'text',
+ name: 'root',
+ message: 'Where is the root of your project?',
+ initial: './',
+ },
+ ]);
+ if (!existsSync(root)) {
+ console.error('The root directory does not exist.');
+ return;
+ }
+
+ const { installDeps } = await prompts({
+ name: 'installDeps',
+ type: 'confirm',
+ message: 'Do you want to install dependencies?',
+ initial: true,
+ });
+ if (!installDeps) {
+ console.error('Skipping dependencies. You will not be able to use the design system without additional setup.');
+ } else {
+ const { packageManager } = await prompts({
+ type: 'select',
+ name: 'packageManager',
+ message: 'Which package manager do you want to use?',
+ choices: [
+ // the value is the command to run to install a package
+ { title: 'npm', value: 'npm install' },
+ { title: 'yarn', value: 'yarn add' },
+ { title: 'pnpm', value: 'pnpm add' },
+ { title: 'bun', value: 'bun add' },
+ ],
+ });
+
+ console.log('installing dependencies...');
+ execSync(`${packageManager} -D ${DEV_DEPENDENCIES.join(' ')}`);
+ execSync(`${packageManager} ${DEPENDENCIES.join(' ')}`);
+ }
+
+ const { setupTailwind } = await prompts({
+ name: 'setupTailwind',
+ type: 'confirm',
+ message: 'Do you want to set up Tailwind?',
+ initial: true,
+ });
+
+ if (setupTailwind) {
+ console.log('Setting up Tailwind v4 for Next.js...');
+ await writeFile(
+ path.join(root, 'postcss.config.mjs'),
+ `
+/** @type {import('postcss-load-config').Config} */
+const config = {
+ plugins: {
+ '@tailwindcss/postcss': {},
+ },
+};
+
+export default config;
+`.trim(),
+ 'utf8'
+ );
+ }
+
+ const { setupStyles } = await prompts({
+ name: 'setupStyles',
+ type: 'confirm',
+ message: 'Do you want to set up the Bits of Good theme?',
+ initial: true,
+ });
+
+ if (!setupStyles) {
+ console.error('Skipping the Bits of Good theme setup. Your project may not look like the Design System Website.');
+ if (setupTailwind) {
+ console.error(
+ 'You will need to finish the Tailwind setup manually. Create a css file with `@import "tailwindcss"` in it, and make sure you import it into your src/app/layout.tsx or src/pages/_app.tsx.'
+ );
+ }
+ } else {
+ const { stylePath } = await prompts({
+ name: 'stylePath',
+ type: 'text',
+ message: 'Where should the BOG theme and tailwindcss stylesheet live?',
+ initial: 'src/styles/globals.css',
+ });
+
+ const response = await fetch(
+ 'https://raw.githubusercontent.com/GTBitsOfGood/design-system/refs/heads/main/src/styles/globals.css'
+ );
+ const styles = await response.text();
Greptile
greptile
style: Missing error handling for the fetch operation. If the network request fails or the GitHub URL is unavailable, this will throw an unhandled exception. ```suggestion + try { const response = await fetch( 'https://raw.githubusercontent.com/GTBitsOfGood/design-system/refs/heads/main/src/styles/globals.css' ); + if (!response.ok) { + throw new Error(`Failed to fetch styles: ${response.status} ${response.statusText}`); } const styles = await response.text(); + } catch (error) { + console.error('Failed to download the BOG theme:', error.message); + console.error('You may need to manually create the stylesheet.'); return; } ```
diff block
"created_by",
"last_modified_at",
"last_modified_by",
+ "recordings_matching_filters_count",
]
created_by = UserBasicSerializer(read_only=True)
last_modified_by = UserBasicSerializer(read_only=True)
+ def get_recordings_matching_filters_count(self, playlist: SessionRecordingPlaylist) -> int | None:
+ redis_client = get_client()
+ counts = redis_client.get(f"{PLAYLIST_COUNT_REDIS_PREFIX}{playlist.short_id}")
+ if counts:
+ count_data = json.loads(counts)
+ id_list = count_data.get("session_ids", None)
+ return len(id_list) if id_list else None
Greptile
greptile
logic: Missing error handling for JSON decode failures when reading from Redis.
suggested fix
+ try:
count_data = json.loads(counts)
id_list = count_data.get("session_ids", None)
return len(id_list) if id_list else None
+ except json.JSONDecodeError:
+ logger.error("Failed to decode playlist count data from Redis")
+ return None
diff block
import { HomePage } from "./home-page";
import { getModules } from "../utils/api";
import { modulesection, moduleitem } from "../utils/types";
-import { showRecent, getRecentModuleItems, getPinnedModuleItems } from "../utils/recent";
+import { showRecent, useModuleStore } from "../utils/store";
export const Modules = (props: { id: number; url: string }) => {
const [searchText, setSearchText] = useState<string>("");
const [modules, setModules] = useState<modulesection[] | undefined>();
const [isLoading, setIsLoading] = useState<boolean>(true);
+ const { refreshModuleItems, recentItems, pinnedItems } = useModuleStore();
+
useEffect(() => {
- const getItems = async () => {
- try {
- const modules = await getModules(props.id);
- for (const m of modules) {
- console.log(m);
- }
- setModules(modules);
- } catch {
- setModules(undefined);
- }
+ getModules(props.id).then((modules) => {
+ setModules(modules);
setIsLoading(false);
- };
- getItems();
+ });
Greptile
greptile
logic: Missing error handling — if getModules rejects, the rejection is unhandled, modules stays undefined, and isLoading is never set back to false, leaving the UI stuck in its loading state
suggested fix
getModules(props.id).then((modules) => {
setModules(modules);
+ setIsLoading(false);
+ }).catch(() => {
+ setModules(undefined);
+ setIsLoading(false);
});
diff block
import { Command } from 'commander';
+import { existsSync } from 'fs';
+import { mkdir, writeFile } from 'fs/promises';
+import prompts from 'prompts';
+import { execSync } from 'child_process';
+import path from 'path';
+
+const DEV_DEPENDENCIES = ['tailwindcss', '@tailwindcss/postcss'];
+const DEPENDENCIES = [
+ '@phosphor-icons/react',
+ '@radix-ui/react-checkbox',
+ '@radix-ui/react-icons',
+ '@radix-ui/themes',
+ 'phosphor-react',
+ 'radix-ui',
+];
+const FONTS = [
+ 'visbyextrabold-webfont.woff2',
+ 'visbyextrabold-webfont.woff',
+ 'opensans-regular-webfont.woff2',
+ 'opensans-regular-webfont.woff',
+];
export const init = new Command()
.command('init')
.description('Initialize a new project')
- .action(() => {
- console.log('Initializing the project');
+ .action(async () => {
+ try {
+ const { root } = await prompts([
+ {
+ type: 'text',
+ name: 'root',
+ message: 'Where is the root of your project?',
+ initial: './',
+ },
+ ]);
+ if (!existsSync(root)) {
+ console.error('The root directory does not exist.');
+ return;
+ }
+
+ const { installDeps } = await prompts({
+ name: 'installDeps',
+ type: 'confirm',
+ message: 'Do you want to install dependencies?',
+ initial: true,
+ });
+ if (!installDeps) {
+ console.error('Skipping dependencies. You will not be able to use the design system without additional setup.');
+ } else {
+ const { packageManager } = await prompts({
+ type: 'select',
+ name: 'packageManager',
+ message: 'Which package manager do you want to use?',
+ choices: [
+ // the value is the command to run to install a package
+ { title: 'npm', value: 'npm install' },
+ { title: 'yarn', value: 'yarn add' },
+ { title: 'pnpm', value: 'pnpm add' },
+ { title: 'bun', value: 'bun add' },
+ ],
+ });
+
+ if (packageManager) {
+ console.log('installing dependencies...');
+ execSync(`cd ${root} && ${packageManager} -D ${DEV_DEPENDENCIES.join(' ')}`);
+ execSync(`cd ${root} && ${packageManager} ${DEPENDENCIES.join(' ')}`);
+ } else {
+ console.error('Package manager selection was cancelled. Dependencies not installed.');
+ }
+ }
+
+ const { setupTailwind } = await prompts({
+ name: 'setupTailwind',
+ type: 'confirm',
+ message: 'Do you want to set up Tailwind v4?',
+ initial: true,
+ });
+
+ if (setupTailwind) {
+ console.log('Setting up Tailwind v4 for Next.js...');
+ await writeFile(
+ path.join(root, 'postcss.config.mjs'),
+ `
+/** @type {import('postcss-load-config').Config} */
+const config = {
+ plugins: {
+ '@tailwindcss/postcss': {},
+ },
+};
+
+export default config;
+`.trim(),
+ 'utf8'
+ );
+ }
+
+ const { setupStyles } = await prompts({
+ name: 'setupStyles',
+ type: 'confirm',
+ message: 'Do you want to set up the Bits of Good theme?',
+ initial: true,
+ });
+
+ if (!setupStyles) {
+ console.error(
+ 'Skipping the Bits of Good theme setup. Your project may not look like the Design System Website.'
+ );
+ if (setupTailwind) {
+ console.error(
+ 'You will need to finish the Tailwind setup manually. Create a css file with `@import "tailwindcss"` in it, and make sure you import it into your src/app/layout.tsx or src/pages/_app.tsx.'
+ );
+ }
+ } else {
+ const { stylePath } = await prompts({
+ name: 'stylePath',
+ type: 'text',
+ message:
+ "Where should the global stylesheet containing the BoG theme live? (input the file name relative to your project's root directory)",
+ initial: 'src/styles/globals.css',
+ });
+
+ const response = await fetch(
+ 'https://raw.githubusercontent.com/GTBitsOfGood/design-system/refs/heads/main/src/styles/globals.css'
+ );
+ const styles = await response.text();
Greptile
greptile
logic: Missing error handling for the fetch operation. If the network request fails, it will throw an unhandled error.
```suggestion
      const response = await fetch(
        'https://raw.githubusercontent.com/GTBitsOfGood/design-system/refs/heads/main/src/styles/globals.css'
      );
+     if (!response.ok) {
+       throw new Error(`Failed to download stylesheet, status: ${response.status}`);
+     }
      const styles = await response.text();
```
diff block
and "pull_request" in payload["issue"]
):
logger.info("Handling new comment")
- return handle_new_comment(payload)
+ # Get repository details from payload
+ repo_full_name = payload["repository"]["full_name"]
+ owner, repo = repo_full_name.split("/")
Greptile
greptile
logic: Missing error handling for the split() unpacking. It raises ValueError if repo_full_name does not contain exactly one '/'.
suggested fix
repo_full_name = payload["repository"]["full_name"]
+ try:
owner, repo = repo_full_name.split("/")
+ except ValueError:
+ logger.error(f"Invalid repository name format: {repo_full_name}")
+ return Response("Invalid repository name", status=400)
diff block
const { lat, lng } = geocodeData.results[0].geometry.location;
// Get Street View metadata to check if Street View is available
- const metadataUrl = `https://maps.googleapis.com/maps/api/streetview/metadata?location=${lat},${lng}&key=${GOOGLE_API_KEY}`;
+ const metadataUrl = `https://maps.googleapis.com/maps/api/streetview/metadata?location=${lat},${lng}&key=${NEXT_PUBLIC_GOOGLE_API_KEY_MAPS}`;
const metadataResponse = await fetch(metadataUrl);
const metadata = await metadataResponse.json();
Greptile
greptile
logic: Missing error handling for failed metadata fetch. Add response.ok check before parsing JSON.
suggested fix
+ const metadataResponse = await fetch(metadataUrl);
+ if (!metadataResponse.ok) {
+ return NextResponse.json(
+ { error: 'Failed to fetch Street View metadata' },
+ { status: metadataResponse.status }
+ );
+ }
+ const metadata = await metadataResponse.json();
diff block
);
}
- if (!GOOGLE_API_KEY) {
+ if (!NEXT_PUBLIC_GOOGLE_API_KEY_MAPS) {
return NextResponse.json(
{ error: 'Google API key is not configured' },
{ status: 500 }
);
}
// First, geocode the address to get coordinates
- const geocodeUrl = `https://maps.googleapis.com/maps/api/geocode/json?address=${encodeURIComponent(address)}&key=${GOOGLE_API_KEY}`;
+ const geocodeUrl = `https://maps.googleapis.com/maps/api/geocode/json?address=${encodeURIComponent(address)}&key=${NEXT_PUBLIC_GOOGLE_API_KEY_MAPS}`;
const geocodeResponse = await fetch(geocodeUrl);
const geocodeData = await geocodeResponse.json();
Greptile
greptile
logic: Missing error handling for failed fetch requests. Add response.ok check before parsing JSON.
suggested fix
+ const geocodeResponse = await fetch(geocodeUrl);
+ if (!geocodeResponse.ok) {
+ return NextResponse.json(
+ { error: 'Failed to geocode address' },
+ { status: geocodeResponse.status }
+ );
+ }
+ const geocodeData = await geocodeResponse.json();
diff block
defer client.Disconnect(ctx)
db := client.Database(database)
- collections, err := db.ListCollectionNames(ctx, bson.M{})
+ cursor, err := db.ListCollections(ctx, bson.M{})
if err != nil {
return nil, err
}
+ defer cursor.Close(ctx)
+
+ collections := []string{}
+ collectionTypes := make(map[string]string)
+ for cursor.Next(ctx) {
+ var collectionInfo bson.M
+ if err := cursor.Decode(&collectionInfo); err != nil {
+ return nil, err
+ }
+
+ name, _ := collectionInfo["name"].(string)
+ collectionType, _ := collectionInfo["type"].(string)
Greptile
greptile
logic: Missing error handling for type assertions. If name or collectionType cannot be asserted to string, this could cause issues. Use the second return value to check for successful type assertion.
suggested fix
+ name, ok := collectionInfo["name"].(string)
+ if !ok {
+ return nil, fmt.Errorf("collection name is not a string")
}
+ collectionType, ok := collectionInfo["type"].(string)
+ if !ok {
+ return nil, fmt.Errorf("collection type is not a string")
}
diff block
+import { $fetch } from "ofetch";
+import type { Module } from "../types/modules";
+
+type Input = {
+ /**
+ * The name of the module to fetch
+ * @example "ui"
+ * @example "algolia"
+ * @example "@nuxtjs/tailwindcss"
+ */
+ name: string;
+};
+
+/**
+ * Fetch information about a specific Nuxt module by name
+ *
+ * Use this tool when:
+ * - You need detailed information about a specific module
+ * - You want to check compatibility, maintainers, or other details of a module
+ * - You need to provide specific information about a module to the user
+ *
+ * @returns The module information if found
+ */
+export default async function tool(input: Input) {
+ const url = `https://api.nuxt.com/modules/${encodeURIComponent(input.name)}`;
+ return await $fetch<Module>(url);
Greptile
greptile
logic: Missing error handling for failed API requests. Consider wrapping in try/catch so callers get a descriptive error.
```suggestion
export default async function tool(input: Input) {
  const url = `https://api.nuxt.com/modules/${encodeURIComponent(input.name)}`;
+ try {
    return await $fetch<Module>(url);
+ } catch (error) {
+   const message = error instanceof Error ? error.message : String(error);
+   throw new Error(`Failed to fetch module: ${message}`);
+ }
}
```
diff block
+import { customDomainRecordsState } from '~/pages/settings/workspace/states/customDomainRecordsState';
+import { useCheckCustomDomainValidRecordsMutation } from '~/generated/graphql';
+import { isDefined } from 'twenty-shared';
+import { useSetRecoilState } from 'recoil';
+
+export const useCheckCustomDomainValidRecords = () => {
+ const [checkCustomDomainValidRecords] =
+ useCheckCustomDomainValidRecordsMutation();
+
+ const setCustomDomainRecords = useSetRecoilState(customDomainRecordsState);
+
+ const checkCustomDomainRecords = () => {
+ setCustomDomainRecords((currentState) => ({
+ ...currentState,
+ loading: true,
+ }));
+ checkCustomDomainValidRecords({
+ onCompleted: (data) => {
+ if (isDefined(data.checkCustomDomainValidRecords)) {
+ setCustomDomainRecords({
+ loading: false,
+ customDomainRecords: data.checkCustomDomainValidRecords,
+ });
+ }
+ },
+ });
Greptile
greptile
logic: Missing error handling for the GraphQL mutation. If the request fails, the loading state will remain true indefinitely. Consider adding an onError callback to reset the loading state.
suggested fix
checkCustomDomainValidRecords({
onCompleted: (data) => {
if (isDefined(data.checkCustomDomainValidRecords)) {
setCustomDomainRecords({
loading: false,
customDomainRecords: data.checkCustomDomainValidRecords,
});
}
},
+ onError: () => {
setCustomDomainRecords((currentState) => ({
...currentState,
loading: false,
}));
},
});
diff block
return isChildTextEditable(oid);
},
);
+
+ ipcMain.handle(MainChannels.GET_TEMPLATE_NODE_PROPS, (e: Electron.IpcMainInvokeEvent, args) => {
+ const templateNode = args as TemplateNode;
+ return getTemplateNodeProps(templateNode);
+ });
Greptile
greptile
logic: Missing error handling for invalid/malformed template node argument. Consider adding validation or try/catch.
diff block
allow_origins=["http://localhost:3000", "https://teacherflow.ai", "https://www.teacherflow.ai"],
allow_credentials=True,
allow_methods=["POST", "GET", "DELETE", "OPTIONS"],
- allow_headers=["Content-Type"],
+ allow_headers=["Content-Type", "Range"],
)
# Constants
VIDEOS_DIR = Path("videos")
VIDEOS_DIR.mkdir(exist_ok=True)
-app.mount("/videos", StaticFiles(directory="videos"), name="videos")
+# Remove StaticFiles mount and add streaming endpoint
+@app.get("/videos/{video_filename}")
+async def stream_video(video_filename: str, request: Request):
+ """Stream video with support for range requests"""
+ video_path = VIDEOS_DIR / video_filename
+
+ range_header = request.headers.get("range")
+ response_data = get_video_file_response(video_path, range_header)
+
+ # Prepare headers
+ headers = {
+ "accept-ranges": response_data.accept_ranges,
+ "content-type": response_data.content_type,
+ "content-length": str(response_data.content_length)
+ }
+
+ if response_data.content_range:
+ headers["content-range"] = response_data.content_range
+
+ # Parse start and chunk size from content range
+ start = int(response_data.content_range.split(" ")[1].split("-")[0])
+ chunk_size = response_data.content_length
+ content = read_video_chunk(video_path, start, chunk_size)
Greptile
greptile
logic: Missing error handling if read_video_chunk fails (e.g., if file is deleted between checks)
diff block
+use anyhow::Result;
+use futures_util::StreamExt;
+use reqwest::{header, Client};
+use std::env;
+use tokio::sync::mpsc;
+
+use super::types::*;
+
+#[derive(Clone)]
+pub struct LiteLLMClient {
+ client: Client,
+ pub(crate) api_key: String,
+ pub(crate) base_url: String,
+}
+
+impl LiteLLMClient {
+ pub fn new(api_key: Option<String>, base_url: Option<String>) -> Self {
+ let api_key = api_key.or_else(|| env::var("LLM_API_KEY").ok()).expect(
+ "LLM_API_KEY must be provided either through parameter or environment variable",
+ );
+
+ let base_url = base_url
+ .or_else(|| env::var("LLM_BASE_URL").ok())
+ .unwrap_or_else(|| "http://localhost:8000".to_string());
+
+ let mut headers = header::HeaderMap::new();
+ headers.insert(
+ "Authorization",
+ header::HeaderValue::from_str(&format!("Bearer {}", api_key)).unwrap(),
+ );
+ headers.insert(
+ "Content-Type",
+ header::HeaderValue::from_static("application/json"),
+ );
+ headers.insert(
+ "Accept",
+ header::HeaderValue::from_static("application/json"),
+ );
+
+ let client = Client::builder()
+ .default_headers(headers)
+ .build()
+ .expect("Failed to create HTTP client");
+
+ Self {
+ client,
+ api_key,
+ base_url,
+ }
+ }
+
+ pub async fn chat_completion(
+ &self,
+ request: ChatCompletionRequest,
+ ) -> Result<ChatCompletionResponse> {
+ let url = format!("{}/chat/completions", self.base_url);
+
+ println!("DEBUG: Sending chat completion request to URL: {}", url);
+ println!(
+ "DEBUG: Request payload: {}",
+ serde_json::to_string_pretty(&request).unwrap()
+ );
+
+ let response = self
+ .client
+ .post(&url)
+ .json(&request)
+ .send()
+ .await?
+ .json::<ChatCompletionResponse>()
+ .await?;
+
+ // Print tool calls if present
+ if let Some(Message::Assistant {
+ tool_calls: Some(tool_calls),
+ ..
+ }) = response.choices.first().map(|c| &c.message)
+ {
+ println!("DEBUG: Tool calls in response:");
+ for tool_call in tool_calls {
+ println!("DEBUG: Tool Call ID: {}", tool_call.id);
+ println!("DEBUG: Tool Name: {}", tool_call.function.name);
+ println!("DEBUG: Tool Arguments: {}", tool_call.function.arguments);
+ }
+ }
+
+ println!(
+ "DEBUG: Received chat completion response: {}",
+ serde_json::to_string_pretty(&response).unwrap()
+ );
+
+ Ok(response)
+ }
+
+ pub async fn stream_chat_completion(
+ &self,
+ request: ChatCompletionRequest,
+ ) -> Result<mpsc::Receiver<Result<ChatCompletionChunk>>> {
+ let url = format!("{}/chat/completions", self.base_url);
+
+ println!(
+ "DEBUG: Starting stream chat completion request to URL: {}",
+ url
+ );
+ println!(
+ "DEBUG: Stream request payload: {}",
+ serde_json::to_string_pretty(&request).unwrap()
+ );
+
+ let mut stream = self
+ .client
+ .post(&url)
+ .json(&ChatCompletionRequest {
+ stream: Some(true),
+ ..request
+ })
+ .send()
+ .await?
+ .bytes_stream();
+
+ let (tx, rx) = mpsc::channel(100);
+
+ tokio::spawn(async move {
+ let mut buffer = String::new();
+ println!("DEBUG: Stream processing started");
+
+ while let Some(chunk_result) = stream.next().await {
+ match chunk_result {
+ Ok(chunk) => {
+ let chunk_str = String::from_utf8_lossy(&chunk);
+ println!("DEBUG: Received raw stream chunk: {}", chunk_str);
+ buffer.push_str(&chunk_str);
+
+ while let Some(pos) = buffer.find("\n\n") {
+ let line = buffer[..pos].trim().to_string();
+ buffer = buffer[pos + 2..].to_string();
+
+ if line.starts_with("data: ") {
+ let data = &line["data: ".len()..];
+ println!("DEBUG: Processing stream data: {}", data);
+ if data == "[DONE]" {
+ println!("DEBUG: Stream completed with [DONE] signal");
+ break;
+ }
+
+ if let Ok(response) =
+ serde_json::from_str::<ChatCompletionChunk>(data)
+ {
+ // Print tool calls if present in the stream chunk
+ if let Some(tool_calls) = &response.choices[0].delta.tool_calls
+ {
+ println!("DEBUG: Tool calls in stream chunk:");
+ for tool_call in tool_calls {
+ if let (Some(id), Some(function)) =
+ (tool_call.id.clone(), tool_call.function.clone())
+ {
+ println!("DEBUG: Tool Call ID: {}", id);
+ if let Some(name) = function.name {
+ println!("DEBUG: Tool Name: {}", name);
+ }
+ if let Some(arguments) = function.arguments {
+ println!(
+ "DEBUG: Tool Arguments: {}",
+ arguments
+ );
+ }
+ }
+ }
+ }
+
+ println!("DEBUG: Parsed stream chunk: {:?}", response);
+ let _ = tx.send(Ok(response)).await;
Greptile
greptile
logic: Missing error handling for tx.send() - should propagate the error if channel is closed
suggested fix
+                                        if tx.send(Ok(response)).await.is_err() {
+                                            // Receiver dropped — stop processing the stream.
+                                            // (`?` is not usable here: the spawned task returns `()`.)
+                                            break;
+                                        }
diff block
import { Controller, Get } from '@nestjs/common';
import { HealthCheck, HealthCheckService } from '@nestjs/terminus';
+import { HealthCacheService } from 'src/engine/core-modules/health/health-cache.service';
+
@Controller('healthz')
export class HealthController {
- constructor(private health: HealthCheckService) {}
+ constructor(
+ private health: HealthCheckService,
+ private healthCacheService: HealthCacheService,
+ ) {}
@Get()
@HealthCheck()
check() {
return this.health.check([]);
}
+
+ @Get('/message-channel-sync-job-by-status')
+ getMessageChannelSyncJobByStatusCounter() {
+ return this.healthCacheService.getMessageChannelSyncJobByStatusCounter();
+ }
Greptile
greptile
style: Missing error handling for potential cache service failures. Consider wrapping in try/catch.
diff block
+"use client";
+
+import { PropsWithChildren, useEffect, useState, type FC } from "react";
+import { CircleXIcon, FileIcon, PaperclipIcon } from "lucide-react";
+import {
+ AttachmentPrimitive,
+ ComposerPrimitive,
+ MessagePrimitive,
+ useAttachment,
+} from "@assistant-ui/react";
+import { useShallow } from "zustand/shallow";
+import {
+ Tooltip,
+ TooltipContent,
+ TooltipTrigger,
+} from "@/components/ui/tooltip";
+import {
+ Dialog,
+ DialogTitle,
+ DialogContent,
+ DialogTrigger,
+} from "@/components/ui/dialog";
+import { Avatar, AvatarImage, AvatarFallback } from "@/components/ui/avatar";
+import { TooltipIconButton } from "@/components/assistant-ui/tooltip-icon-button";
+import { TooltipProvider } from "@radix-ui/react-tooltip";
+
+const useFileSrc = (file: File | undefined) => {
+ const [src, setSrc] = useState<string | undefined>(undefined);
+
+ useEffect(() => {
+ if (!file) {
+ setSrc(undefined);
+ return;
+ }
+
+ const objectUrl = URL.createObjectURL(file);
+ setSrc(objectUrl);
+
+ return () => {
+ URL.revokeObjectURL(objectUrl);
+ };
+ }, [file]);
+
+ return src;
+};
+
+const useAttachmentSrc = () => {
+ const { file, src } = useAttachment(
+ useShallow((a): { file?: File; src?: string } => {
+ if (a.type !== "image") return {};
+ if (a.file) return { file: a.file };
+ const src = a.content?.filter((c) => c.type === "image")[0]?.image;
+ if (!src) return {};
+ return { src };
+ }),
+ );
+
+ return useFileSrc(file) ?? src;
+};
+
+type AttachmentPreviewProps = {
+ src: string;
+};
+
+const AttachmentPreview: FC<AttachmentPreviewProps> = ({ src }) => {
+ const [isLoaded, setIsLoaded] = useState(false);
+
+ return (
+ // eslint-disable-next-line @next/next/no-img-element
+ <img
+ src={src}
+ style={{
+ width: "auto",
+ height: "auto",
+ maxWidth: "75dvh",
+ maxHeight: "75dvh",
+ display: isLoaded ? "block" : "none",
+ overflow: "clip",
+ }}
+ onLoad={() => setIsLoaded(true)}
+ alt="Preview"
+ />
Greptile
greptile
logic: missing error handling for invalid/failed image loads - should handle onError event
diff block
const commandMenuPage = useRecoilValue(commandMenuPageState);
- const { title, Icon } = useRecoilValue(commandMenuPageInfoState);
+ const commandMenuNavigationStack = useRecoilValue(
+ commandMenuNavigationStackState,
+ );
const theme = useTheme();
const isCommandMenuV2Enabled = useIsFeatureEnabled(
FeatureFlagKey.IsCommandMenuV2Enabled,
);
+ const contextChips = commandMenuNavigationStack
+ .filter((page) => page.page !== CommandMenuPages.Root)
+ .map((page) => {
+ return {
+ Icons: [<page.pageIcon size={theme.icon.size.sm} />],
+ text: page.pageTitle,
+ };
+ });
Greptile
greptile
logic: Missing error handling if page.pageIcon is undefined. Could cause runtime errors.
suggested fix
const contextChips = commandMenuNavigationStack
.filter((page) => page.page !== CommandMenuPages.Root)
.map((page) => {
return {
+ Icons: page.pageIcon ? [<page.pageIcon size={theme.icon.size.sm} />] : [],
text: page.pageTitle,
};
});
diff block
+---
+description: Helpful for working with and building tools
+globs: */tools/*
+---
+# Tools Documentation and Guidelines
+
+## Overview
+This document outlines the architecture, patterns, and best practices for building tools in our system. Tools are modular, reusable components that provide specific functionality to our AI agents and application.
+
+## Core Architecture
+
+### ToolExecutor Trait
+The foundation of our tools system is the `ToolExecutor` trait. Any struct that wants to be used as a tool must implement this trait:
+
+```rust
+#[async_trait]
+pub trait ToolExecutor: Send + Sync {
+ type Output: Serialize + Send;
+ async fn execute(&self, tool_call: &ToolCall) -> Result<Self::Output>;
+ fn get_schema(&self) -> serde_json::Value;
+ fn get_name(&self) -> String;
+}
+```
+
+Key components:
+- `Output`: The return type of your tool (must be serializable)
+- `execute()`: The main function that implements your tool's logic
+- `get_schema()`: Returns the JSON schema describing the tool's interface
+- `get_name()`: Returns the tool's unique identifier
+
+## Tool Categories
+
+### 1. File Tools
+Our file tools provide a robust example of well-structured tool implementation. They handle:
+- File creation and modification
+- File searching and cataloging
+- File type-specific operations
+- User interaction with files
+
+Key patterns from file tools:
+- Modular organization by functionality
+- Clear separation of concerns
+- Type-safe file operations
+- Consistent error handling
+
+### 2. Interaction Tools
+Tools that manage user and system interactions.
+
+## Best Practices
+
+### 1. Tool Structure
+- Create a new module for each tool category
+- Implement the `ToolExecutor` trait
+- Use meaningful types for `Output`
+- Provide comprehensive error handling
+
+### 2. Schema Design
+- Document all parameters clearly
+- Use descriptive names for properties
+- Include example values where helpful
+- Validate input parameters
+
+### 3. Error Handling
+- Use `anyhow::Result` for flexible error handling
+- Provide meaningful error messages
+- Handle edge cases gracefully
+- Implement proper error propagation
+
+### 4. Testing
+- Write unit tests for each tool
+- Test edge cases and error conditions
+- Mock external dependencies
+- Ensure thread safety for async operations
+
+## Creating New Tools
+
+### Step 1: Define Your Tool
+```rust
+pub struct MyNewTool {
+ // Tool-specific fields
+}
+
+#[async_trait]
+impl ToolExecutor for MyNewTool {
+ type Output = YourOutputType;
+
+ async fn execute(&self, tool_call: &ToolCall) -> Result<Self::Output> {
+ // Implementation
+ }
+
+ fn get_schema(&self) -> Value {
+ // Schema definition
+ }
+
+ fn get_name(&self) -> String {
+ "my_new_tool".to_string()
+ }
+}
+```
+
+### Step 2: Schema Definition
+```json
+{
+ "name": "my_new_tool",
+ "description": "Clear description of what the tool does",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ // Tool parameters
+ },
+ "required": ["param1", "param2"]
+ }
+}
+```
+
+### Step 3: Integration
+1. Add your tool to the appropriate module
+2. Register it in the tool registry
+3. Add necessary tests
+4. Document usage examples
+
+## Common Patterns
+
+### Value Conversion
+Use `IntoValueTool` trait when you need to convert tool output to generic JSON:
+```rust
+my_tool.into_value_tool()
+```
+
+### File Operations
+For tools that modify files:
+- Implement `FileModificationTool` trait
+- Use `add_line_numbers` for better output formatting
+- Handle file permissions appropriately
+
+## Security Considerations
+1. Validate all input parameters
+2. Check file permissions before operations
+3. Sanitize file paths
+4. Handle sensitive data appropriately
+
+## Examples
+
+### File Tool Example
+```rust
+pub struct ReadFileTool {
+ base_path: PathBuf,
+}
+
+#[async_trait]
+impl ToolExecutor for ReadFileTool {
+ type Output = String;
+
+ async fn execute(&self, tool_call: &ToolCall) -> Result<Self::Output> {
+ // Implementation
+ }
Greptile
greptile
style: The ReadFileTool example is missing error handling for file system operations and path validation
diff block
+import { $fetch } from "ofetch";
+
+/**
+ * Fetch the available categories for Nuxt modules
+ *
+ * Use this tool when:
+ * - You need to know what categories of modules are available
+ * - You want to explore modules by category
+ * - You need to recommend module categories based on user requirements
+ *
+ * @returns The list of available module categories
+ */
+export default async function tool() {
+ const { categories } = await $fetch("https://api.nuxt.com/modules/categories");
+ return categories as string[];
Greptile
greptile
logic: Missing error handling for network failures or invalid API responses. Wrap in try/catch and use showFailureToast from @raycast/utils
suggested fix
export default async function tool() {
+ try {
const { categories } = await $fetch("https://api.nuxt.com/modules/categories");
return categories as string[];
+ } catch (error) {
+ showFailureToast("Failed to fetch module categories", error);
+ return [];
+ }
diff block
+import { Action, ActionPanel, Icon, List, LocalStorage } from "@raycast/api";
+import { useEffect, useState } from "react";
+
+let commandHistory = [];
+
+export default function History() {
+ const [history, setHistory] = useState([]);
+
+ useEffect(() => {
+ // Load history from LocalStorage when component mounts
+ LocalStorage.getItem("gemini_command_history").then((storedHistory) => {
+ if (storedHistory) {
Greptile
greptile
logic: Missing error handling for LocalStorage.getItem() - should wrap in try/catch and use showFailureToast from @raycast/utils
suggested fix
+ try {
+ const storedHistory = await LocalStorage.getItem("gemini_command_history");
if (storedHistory) {
diff block
logger.info("Sending usage reports to PostHog and Billing...") # noqa T201
time_now = datetime.now()
+
+ producer = get_sqs_producer("orders")
+ if not producer:
+ logger.error("Failed to get SQS producer for 'orders' queue")
+ return
Greptile
greptile
logic: Missing error handling for producer initialization failure - should raise an exception rather than just returning
suggested fix
producer = get_sqs_producer("orders")
if not producer:
+ error_msg = "Failed to get SQS producer for 'orders' queue"
+ logger.error(error_msg)
+ raise Exception(error_msg)
diff block
+import React, { useMemo, useState } from 'react'
+import { ScrollView, StyleProp, ViewStyle } from 'react-native'
+
+import { getDisplayOrderQuestions, SurveyAppearanceTheme } from '../surveys-utils'
+import { Survey, SurveyAppearance, SurveyQuestion } from '../../../../posthog-core/src/surveys-types'
+import { LinkQuestion, MultipleChoiceQuestion, OpenTextQuestion, RatingQuestion } from './QuestionTypes'
+import { PostHog } from '../../posthog-rn'
+import { usePostHog } from '../../hooks/usePostHog'
+
+const getSurveyInteractionProperty = (survey: Survey, action: string): string => {
+ let surveyProperty = `$survey_${action}/${survey.id}`
+ if (survey.current_iteration && survey.current_iteration > 0) {
+ surveyProperty = `$survey_${action}/${survey.id}/${survey.current_iteration}`
+ }
+
+ return surveyProperty
+}
+
+export const sendSurveyShownEvent = (survey: Survey, posthog: PostHog): void => {
+ posthog.capture('survey shown', {
+ $survey_name: survey.name,
+ $survey_id: survey.id,
+ $survey_iteration: survey.current_iteration,
+ $survey_iteration_start_date: survey.current_iteration_start_date,
+ })
+}
+
+export const sendSurveyEvent = (
+ responses: Record<string, string | number | string[] | null> = {},
+ survey: Survey,
+ posthog: PostHog
+): void => {
+ posthog.capture('survey sent', {
+ $survey_name: survey.name,
+ $survey_id: survey.id,
+ $survey_iteration: survey.current_iteration,
+ $survey_iteration_start_date: survey.current_iteration_start_date,
+ $survey_questions: survey.questions.map((question: SurveyQuestion) => question.question),
+ ...responses,
+ $set: {
+ [getSurveyInteractionProperty(survey, 'responded')]: true,
+ },
+ })
+}
+
+export const dismissedSurveyEvent = (survey: Survey, posthog: PostHog): void => {
+ posthog.capture('survey dismissed', {
+ $survey_name: survey.name,
+ $survey_id: survey.id,
+ $survey_iteration: survey.current_iteration,
+ $survey_iteration_start_date: survey.current_iteration_start_date,
+ $set: {
+ [getSurveyInteractionProperty(survey, 'dismissed')]: true,
+ },
+ })
+}
+
+export function Questions({
+ survey,
+ appearance,
+ styleOverrides,
+ onSubmit,
+}: {
+ survey: Survey
+ appearance: SurveyAppearanceTheme
+ styleOverrides?: StyleProp<ViewStyle>
+ onSubmit: () => void
+}): JSX.Element {
+ const [questionsResponses, setQuestionsResponses] = useState({})
+ const [currentQuestionIndex, setCurrentQuestionIndex] = useState(0)
+ const surveyQuestions = useMemo(() => getDisplayOrderQuestions(survey), [survey])
+ const posthog = usePostHog()
+
+ const onNextButtonClick = ({
+ res,
+ originalQuestionIndex,
+ }: // displayQuestionIndex,
+ {
+ res: string | string[] | number | null
+ originalQuestionIndex: number
+ // displayQuestionIndex: number
+ }): void => {
+ const responseKey = originalQuestionIndex === 0 ? `$survey_response` : `$survey_response_${originalQuestionIndex}`
+
+ setQuestionsResponses({ ...questionsResponses, [responseKey]: res })
+
+ const isLastDisplayedQuestion = originalQuestionIndex === survey.questions.length - 1
+ if (isLastDisplayedQuestion) {
+ sendSurveyEvent({ ...questionsResponses, [responseKey]: res }, survey, posthog)
+ onSubmit()
+ } else {
+ setCurrentQuestionIndex(originalQuestionIndex + 1)
+ }
+ }
+
+ const question = surveyQuestions[currentQuestionIndex]
+
+ return (
+ <ScrollView style={[styleOverrides, { flexGrow: 0 }]}>
+ {getQuestionComponent({
+ question,
+ appearance,
+ onSubmit: (res) =>
+ onNextButtonClick({
+ res,
+ originalQuestionIndex: question.originalQuestionIndex,
+ // displayQuestionIndex: currentQuestionIndex,
+ }),
+ })}
+ </ScrollView>
+ )
+}
+
+type GetQuestionComponentProps = {
+ question: SurveyQuestion
+ appearance: SurveyAppearance
+ onSubmit: (res: string | string[] | number | null) => void
+}
+
+const getQuestionComponent = (props: GetQuestionComponentProps): JSX.Element => {
+ const questionComponents = {
+ open: OpenTextQuestion,
+ link: LinkQuestion,
+ rating: RatingQuestion,
+ multiple_choice: MultipleChoiceQuestion,
+ single_choice: MultipleChoiceQuestion,
+ }
+
+ const Component = questionComponents[props.question.type]
Greptile
greptile
logic: missing error handling if question.type is invalid or Component is undefined
diff block
}
}
})
+
+export const useRegister = createPostMutationHook({
+ endpoint: RegisterCommand.TSQ_url,
+ bodySchema: RegisterCommand.RequestSchema,
+ responseSchema: RegisterCommand.ResponseSchema,
+ rMutationParams: {
+ onSuccess: (data) => {
+ notifications.show({
+ title: 'Register',
+ message: 'User registered successfully',
+ color: 'green'
+ })
+ setToken({ token: data.accessToken })
+ }
+ }
+})
Greptile
greptile
logic: Missing error handling for registration failures. The useLogin hook has error handling but useRegister does not.
suggested fix
export const useRegister = createPostMutationHook({
endpoint: RegisterCommand.TSQ_url,
bodySchema: RegisterCommand.RequestSchema,
responseSchema: RegisterCommand.ResponseSchema,
rMutationParams: {
onSuccess: (data) => {
notifications.show({
title: 'Register',
message: 'User registered successfully',
color: 'green'
})
setToken({ token: data.accessToken })
+ },
+ onError: (error) => {
notifications.show({
title: 'Register',
+ message: error.message,
+ color: 'red'
})
}
}
})
diff block
)
IntervalLiteral = Literal["minute", "hour", "day", "week", "month"]
+ORDERED_INTERVALS = ["minute", "hour", "day", "week", "month"]
+
+
+def compare_intervals(
+ interval1: IntervalLiteral, operator: Literal["<", "<=", "=", ">", ">="], interval2: IntervalLiteral
+) -> bool:
+ if operator == "<":
+ return ORDERED_INTERVALS.index(interval1) < ORDERED_INTERVALS.index(interval2)
+ elif operator == "<=":
+ return ORDERED_INTERVALS.index(interval1) <= ORDERED_INTERVALS.index(interval2)
+ elif operator == "=":
+ return ORDERED_INTERVALS.index(interval1) == ORDERED_INTERVALS.index(interval2)
+ elif operator == ">":
+ return ORDERED_INTERVALS.index(interval1) > ORDERED_INTERVALS.index(interval2)
+ elif operator == ">=":
+ return ORDERED_INTERVALS.index(interval1) >= ORDERED_INTERVALS.index(interval2)
Greptile
greptile
logic: Missing error handling for invalid interval values. If interval1 or interval2 are not in ORDERED_INTERVALS, this will raise an unhandled ValueError.
suggested fix
def compare_intervals(
interval1: IntervalLiteral, operator: Literal["<", "<=", "=", ">", ">="], interval2: IntervalLiteral
) -> bool:
+ if interval1 not in ORDERED_INTERVALS or interval2 not in ORDERED_INTERVALS:
+ raise ValueError(f"Invalid interval values: {interval1}, {interval2}")
if operator == "<":
return ORDERED_INTERVALS.index(interval1) < ORDERED_INTERVALS.index(interval2)
elif operator == "<=":
return ORDERED_INTERVALS.index(interval1) <= ORDERED_INTERVALS.index(interval2)
elif operator == "=":
return ORDERED_INTERVALS.index(interval1) == ORDERED_INTERVALS.index(interval2)
elif operator == ">":
return ORDERED_INTERVALS.index(interval1) > ORDERED_INTERVALS.index(interval2)
elif operator == ">=":
return ORDERED_INTERVALS.index(interval1) >= ORDERED_INTERVALS.index(interval2)
diff block
+import { useTerminal } from "./auth";
+import { useQuery } from "@tanstack/react-query";
+
+export type Brew = {
+ id: string;
+ varId: string;
+ title: string;
+ subTitle: string;
+ description: string;
+ price: number;
+ color: string;
+ subscription?: "allowed" | "required" | string;
+};
+
+export const useProducts = () => {
+ const terminal = useTerminal();
+ return useQuery({
+ queryKey: ["products"],
+ queryFn: async () => {
+ const products = await terminal.product.list().then((d) => d.data);
Greptile
greptile
logic: Missing error handling for failed API requests. Consider using try/catch and showFailureToast from @raycast/utils
suggested fix
+ const products = await terminal.product.list().catch((error) => {
+ showFailureToast("Failed to fetch products", error);
+ return { data: [] };
+ }).then((d) => d.data);
diff block
+import {
+ getPreferenceValues,
+ LocalStorage,
+ OAuth,
+ PreferenceValues,
+} from '@raycast/api'
+import fetch, { RequestInit } from 'node-fetch'
+
+interface workspace {
+ id: string
+ name: string
+ url: string
+}
+
+interface User {
+ id: string
+ workspace: workspace
+}
+
+export const baseURI = 'https://api.awork.com/api/v1'
+export let authorizationInProgress = false
+export let revalidating = false
+
+const preferences = getPreferenceValues<PreferenceValues>()
+
+export const client = new OAuth.PKCEClient({
+ providerName: 'awork',
+ redirectMethod: OAuth.RedirectMethod.Web,
+ description: 'Connect your awork account...',
+})
+
+const getRequestOptions = (body: URLSearchParams): RequestInit => ({
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ Authorization: `Basic ${btoa(preferences.clientId + ':' + preferences.clientSecret)}`,
+ },
+ body: body,
+ redirect: 'follow',
+})
+
+const authorizeClient = async () => {
+ if (await client.getTokens()) {
+ console.log('Already logged in!')
+ return
+ }
+
+ if (authorizationInProgress) {
+ console.log('Already trying to login!')
+ return
+ }
+ authorizationInProgress = true
+
+ const authRequest = await client.authorizationRequest({
+ endpoint: `${baseURI}/accounts/authorize`,
+ clientId: preferences.clientId,
+ scope: 'offline_access',
+ extraParameters: { clientSecret: preferences.clientSecret },
+ })
+ const { authorizationCode } = await client.authorize(authRequest)
+ const body = new URLSearchParams()
+ body.append(
+ 'redirect_uri',
+ 'https://raycast.com/redirect?packageName=Extension',
+ )
+ body.append('grant_type', 'authorization_code')
+ body.append('code', authorizationCode)
+
+ await fetch(`${baseURI}/accounts/token`, getRequestOptions(body))
+ .then((response) => response.text())
+ .then((result) => {
+ client.setTokens(<OAuth.TokenResponse>JSON.parse(result))
+ })
+ .catch((error: Error) => console.error(error))
+ if (await client.getTokens()) {
+ console.log('Logged in successfully!')
+ await getUserData()
+ }
+ authorizationInProgress = false
+}
+
+export const refreshToken = async () => {
+ const tokens = await client.getTokens()
+ if (!tokens) {
+ await authorizeClient()
+ return
+ } else {
+ if (revalidating) {
+ return
+ }
+ revalidating = true
+ if (!tokens.refreshToken) {
+ return
+ }
Greptile
greptile
logic: Missing error handling when refresh token is not available - should throw an error or show a toast to prompt re-authentication.
diff block
+import { combineTransactionSteps, Extension, findChildrenInRange, getChangedRanges } from '@tiptap/core'
+import { nanoid } from 'nanoid'
+import { Fragment, Slice } from 'prosemirror-model'
+import { Plugin, PluginKey } from 'prosemirror-state'
+
+function createId() {
+ let id = nanoid(8)
+ return id
+}
+
+/**
+ * Code from Tiptap UniqueID extension (https://tiptap.dev/api/extensions/unique-id)
+ * This extension is licensed under MIT (even though it's part of Tiptap pro).
+ *
+ * If you're a user of BlockNote, we still recommend to support their awesome work and become a sponsor!
+ * https://tiptap.dev/pro
+ */
+
+/**
+ * Removes duplicated values within an array.
+ * Supports numbers, strings and objects.
+ */
+function removeDuplicates(array: any, by = JSON.stringify) {
+ const seen: any = {}
+ return array.filter((item: any) => {
+ const key = by(item)
+ return Object.prototype.hasOwnProperty.call(seen, key) ? false : (seen[key] = true)
+ })
+}
+
+/**
+ * Returns a list of duplicated items within an array.
+ */
+function findDuplicates(items: any) {
+ const filtered = items.filter((el: any, index: number) => items.indexOf(el) !== index)
+ const duplicates = removeDuplicates(filtered)
+ return duplicates
+}
+
+const UniqueID = Extension.create({
+ name: 'uniqueID',
+ // we’ll set a very high priority to make sure this runs first
+ // and is compatible with `appendTransaction` hooks of other extensions
+ priority: 10000,
+ addOptions() {
+ return {
+ attributeName: 'id',
+ types: [],
+ generateID: () => {
+ // Use mock ID if tests are running.
+ if (typeof window !== 'undefined' && (window as any).__TEST_OPTIONS) {
+ const testOptions = (window as any).__TEST_OPTIONS
+ if (testOptions.mockID === undefined) {
+ testOptions.mockID = 0
+ } else {
+ testOptions.mockID++
+ }
+
+ return testOptions.mockID.toString() as string
+ }
+
+ return createId()
+ },
+ filterTransaction: null,
+ }
+ },
+ addGlobalAttributes() {
+ return [
+ {
+ types: this.options.types,
+ attributes: {
+ [this.options.attributeName]: {
+ default: null,
+ parseHTML: (element) => element.getAttribute(`data-${this.options.attributeName}`),
+ renderHTML: (attributes) => ({
+ [`data-${this.options.attributeName}`]: attributes[this.options.attributeName],
+ }),
+ },
+ },
+ },
+ ]
+ },
+ // check initial content for missing ids
+ // onCreate() {
+ // // Don’t do this when the collaboration extension is active
+ // // because this may update the content, so Y.js tries to merge these changes.
+ // // This leads to empty block nodes.
+ // // See: https://github.com/ueberdosis/tiptap/issues/2400
+ // if (
+ // this.editor.extensionManager.extensions.find(
+ // (extension) => extension.name === "collaboration"
+ // )
+ // ) {
+ // return;
+ // }
+ // const { view, state } = this.editor;
+ // const { tr, doc } = state;
+ // const { types, attributeName, generateID } = this.options;
+ // const nodesWithoutId = findChildren(doc, (node) => {
+ // return (
+ // types.includes(node.type.name) && node.attrs[attributeName] === null
+ // );
+ // });
+ // nodesWithoutId.forEach(({ node, pos }) => {
+ // tr.setNodeMarkup(pos, undefined, {
+ // ...node.attrs,
+ // [attributeName]: generateID(),
+ // });
+ // });
+ // tr.setMeta("addToHistory", false);
+ // view.dispatch(tr);
+ // },
+ addProseMirrorPlugins() {
+ let dragSourceElement: any = null
+ let transformPasted = false
+ return [
+ new Plugin({
+ key: new PluginKey('uniqueID'),
+ appendTransaction: (transactions, oldState, newState) => {
+ // console.log("appendTransaction");
+ const docChanges =
+ transactions.some((transaction) => transaction.docChanged) && !oldState.doc.eq(newState.doc)
+ const filterTransactions =
+ this.options.filterTransaction &&
+ transactions.some((tr) => {
+ let _a, _b
+ return !((_b = (_a = this.options).filterTransaction) === null || _b === void 0
+ ? void 0
+ : _b.call(_a, tr))
+ })
+ if (!docChanges || filterTransactions) {
+ return
+ }
+ const { tr } = newState
+ const { types, attributeName, generateID } = this.options
+ const transform = combineTransactionSteps(oldState.doc, transactions as any)
+ const { mapping } = transform
+ // get changed ranges based on the old state
+ const changes = getChangedRanges(transform)
+
+ changes.forEach(({ newRange }) => {
+ const newNodes = findChildrenInRange(newState.doc, newRange, (node) => {
+ return types.includes(node.type.name)
+ })
+ const newIds = newNodes.map(({ node }) => node.attrs[attributeName]).filter((id) => id !== null)
+ const duplicatedNewIds = findDuplicates(newIds)
+ newNodes.forEach(({ node, pos }) => {
+ let _a
+ // instead of checking `node.attrs[attributeName]` directly
+ // we look at the current state of the node within `tr.doc`.
+ // this helps to prevent adding new ids to the same node
+ // if the node changed multiple times within one transaction
+ const id = (_a = tr.doc.nodeAt(pos)) === null || _a === void 0 ? void 0 : _a.attrs[attributeName]
+ if (id === null) {
+ tr.setNodeMarkup(pos, undefined, {
+ ...node.attrs,
+ [attributeName]: generateID(),
+ })
+ return
+ }
+ // check if the node doesn’t exist in the old state
+ const { deleted } = mapping.invert().mapResult(pos)
+ const newNode = deleted && duplicatedNewIds.includes(id)
+ if (newNode) {
+ tr.setNodeMarkup(pos, undefined, {
+ ...node.attrs,
+ [attributeName]: generateID(),
+ })
+ }
+ })
+ })
+ if (!tr.steps.length) {
+ return
+ }
+ return tr
+ },
+ // we register a global drag handler to track the current drag source element
+ view(view) {
+ const handleDragstart = (event: any) => {
+ let _a
+ dragSourceElement = (
+ (_a = view.dom.parentElement) === null || _a === void 0 ? void 0 : _a.contains(event.target)
+ )
+ ? view.dom.parentElement
+ : null
+ }
Greptile
greptile
style: Event handler uses any type and has complex null checks that could be simplified with optional chaining. Also missing error handling for invalid event targets.
diff block
+// import { showToast, Toast } from "@raycast/api";
+import { useCachedPromise, CachedPromiseOptions } from "@raycast/utils";
+import { FunctionReturningPromise } from "@raycast/utils/dist/types";
+
+// import { useExtensionContext } from "@/context/ExtensionContext";
+
+export const useSafeCachedPromise: typeof useCachedPromise = <T extends FunctionReturningPromise, U = undefined>(
+ fn: T,
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ args?: any,
+ options?: CachedPromiseOptions<T, U>,
+) => {
+ // const handleError = useHandleError(useExtensionContext());
+ return useCachedPromise(fn, args, {
+ ...options,
+ // onError: handleError,
+ });
Greptile
greptile
logic: Missing error handling could cause silent failures in API calls. Either implement the commented error handler or remove 'Safe' from the hook name
diff block
Ok(())
}
+#[cfg(feature = "enterprise")]
+pub async fn get_user_roles(user_email: &str, org_id: Option<&str>) -> Vec<String> {
+ if let Some(roles) = check_cache(user_email).await {
+ return get_user_roles_by_org_id(roles, org_id);
+ }
+
+ let roles = o2_openfga::authorizer::roles::get_roles_for_user(user_email).await;
+ update_cache(user_email, roles.clone()).await;
+
+ get_user_roles_by_org_id(roles, org_id)
+}
+#[cfg(feature = "enterprise")]
+fn get_user_roles_by_org_id(roles: Vec<String>, org_id: Option<&str>) -> Vec<String> {
+ match org_id {
+ Some(org_id) => roles
+ .iter()
+ .filter_map(|role| {
+ let parts: Vec<&str> = role.split('/').collect();
+ if parts.first() == Some(&org_id) {
+ Some(parts.get(1).unwrap().to_string())
+ } else {
+ None
+ }
+ })
+ .collect(),
+ None => roles,
+ }
+}
Greptile
greptile
logic: Missing error handling for array index access with unwrap() on parts.get(1). Could panic if role string format is invalid.
suggested fix
fn get_user_roles_by_org_id(roles: Vec<String>, org_id: Option<&str>) -> Vec<String> {
match org_id {
Some(org_id) => roles
.iter()
.filter_map(|role| {
let parts: Vec<&str> = role.split('/').collect();
if parts.first() == Some(&org_id) {
+ parts.get(1).map(|s| s.to_string())
} else {
None
}
})
.collect(),
None => roles,
}
}
diff block
const { pop } = useNavigation();
const update = trpc.space.update.useMutation();
- async function handleSubmit() {
- try {
- await update.mutateAsync({ spaceId, [keyToEdit]: editingValue });
- showToast({
- style: Toast.Style.Success,
- title: "Updated space",
- });
- pop();
- } catch (error) {
- showToast({
- style: Toast.Style.Failure,
- title: "Failed to update space",
- });
- }
+ function handleSubmit() {
+ update.mutate(
+ { spaceId, [keyToEdit]: editingValue },
+ {
+ onSuccess: () => {
+ showToast({
+ style: Toast.Style.Success,
+ title: "Updated space",
+ });
+ pop();
+ },
+ },
+ );
Greptile
greptile
logic: Mutation missing error handling. Consider adding an onError callback to display a failure toast (e.g., using showFailureToast) on update failure.
suggested fix
function handleSubmit() {
update.mutate(
{ spaceId, [keyToEdit]: editingValue },
{
onSuccess: () => {
showToast({
style: Toast.Style.Success,
title: "Updated space",
});
pop();
},
+ onError: (error) => {
+ showFailureToast(error, { title: "Failed to update space" });
},
},
);
diff block
const { pop } = useNavigation();
const invite = trpc.user.inviteMembers.useMutation();
- async function handleSubmit() {
- try {
- await invite.mutateAsync({ spaceId, emails });
- showToast({
- style: Toast.Style.Success,
- title: "Sent invitations",
- });
- pop();
- } catch (error) {
- showToast({
- style: Toast.Style.Failure,
- title: "Failed to send invitations",
- });
- }
+ function handleSubmit() {
+ invite.mutate(
+ { spaceId, emails },
+ {
+ onSuccess: () => {
+ showToast({
+ style: Toast.Style.Success,
+ title: "Sent invitations",
+ });
+ pop();
+ },
+ },
+ );
}
Greptile
greptile
logic: Missing error handling callback for invite.mutate. Consider adding an onError callback to display a failure toast when the mutation fails.
suggested fix
function handleSubmit() {
invite.mutate(
{ spaceId, emails },
{
onSuccess: () => {
showToast({
style: Toast.Style.Success,
title: "Sent invitations",
});
pop();
},
+ onError: (error) => {
+ showFailureToast(error, { title: "Failed to send invitations" });
},
},
);
+ }
diff block
import { getUUID, getWebSocketUrl } from "@/utils/zincutils";
import useWebSocket from "@/composables/useWebSocket";
import type { SearchRequestPayload } from "@/ts/interfaces";
+import { useStore } from "vuex";
+import { ref } from "vue";
+
+type MessageHandler = (event: MessageEvent) => void;
+type OpenHandler = (event: Event) => void;
+type CloseHandler = (event: CloseEvent) => void;
+type ErrorHandler = (event: Event) => void;
+
+type WebSocketHandler = MessageHandler | CloseHandler | ErrorHandler;
+
+type HandlerMap = Record<"message" | "close" | "error", WebSocketHandler[]>;
+
+const webSocket = useWebSocket();
+
+const traces: Record<string, HandlerMap> = {};
+
+const openHandlers: OpenHandler[] = [];
+
+const socketId = ref<string | null>(null);
+
+const isCreatingSocket = ref(false);
const useSearchWebSocket = () => {
- const webSocket = useWebSocket();
+ const store = useStore();
+
+ const onOpen = (response: any) => {
+ isCreatingSocket.value = false;
+ openHandlers.forEach((handler) => handler(response));
+ openHandlers.length = 0;
+ };
+
+ const onMessage = (response: any) => {
+ if (response.type === "end") {
+ traces[response.content.trace_id]?.close?.forEach((handler) =>
+ handler(response),
+ );
+ cleanUpListeners(response.content.trace_id);
+ }
+ traces[response.content.trace_id]?.message?.forEach((handler) =>
+ handler(response),
+ );
+ };
+
+ const onClose = (response: any) => {
+ isCreatingSocket.value = false;
+ socketId.value = null;
+ Object.keys(traces).forEach((traceId) => {
+ traces[traceId]?.close.forEach((handler) => handler(response));
+ cleanUpListeners(traceId);
+ });
+ };
+
+ const onError = (response: any) => {
+ traces[response.content.trace_id].error.forEach((handler) =>
+ handler(response),
+ );
+ // cleanUpListeners(response.traceId)
+ };
Greptile
greptile
logic: Missing error handling - accessing trace_id directly could cause runtime errors if response.content is undefined. Should add null checks.
suggested fix
const onError = (response: any) => {
+ const traceId = response?.content?.trace_id;
+ if (traceId && traces[traceId]) {
+ traces[traceId].error.forEach((handler) =>
handler(response),
);
}
// cleanUpListeners(response.traceId)
};
diff block
import glom
# encoded by parse_html
- target, title = None, None
+ target, title, actions = None, None, None
parts = csv_decode_row(button.button_id)
if len(parts) >= 3:
target = parts[1]
+ actions = parts[2]
title = parts[-1]
bot.request_overrides = bot.request_overrides or {}
- glom.assign(
- bot.request_overrides,
- target or "input_prompt",
- title or button.button_title,
- )
+ if len(parts) >= 3 and "send_location" in actions:
+ location_text = _handle_location_msg(location_coords)
+ glom.assign(
+ bot.request_overrides,
+ target or "input_prompt",
+ location_text or "error receiving location information",
+ )
Greptile
greptile
logic: Missing error handling for case where location_coords is None but 'send_location' action is present
suggested fix
if len(parts) >= 3 and "send_location" in actions:
+ if not location_coords:
+ location_text = "Error: No location coordinates provided"
+ else:
location_text = _handle_location_msg(location_coords)
glom.assign(
bot.request_overrides,
target or "input_prompt",
location_text or "error receiving location information",
)
diff block
actorAuthMethod: ActorAuthMethod,
actorOrgId: string | undefined
) => {
- const { permission, membership } = await permissionService.getUserProjectPermission({
- userId,
+ const { permission, membership } = await permissionService.getProjectPermission({
+ actor: ActorType.USER,
+ actorId: userId,
projectId,
- authMethod: actorAuthMethod,
- userOrgId: actorOrgId,
+ actorAuthMethod,
+ actorOrgId,
actionProjectType: ActionProjectType.Any
});
- return { permissions: packRules(permission.rules), membership };
+ // just to satisfy ts
+ if (!("roles" in membership)) throw new BadRequestError({ message: "Service token not allowed" });
+
+ const projectAssumeRole = requestContext.get("projectAssumeRole");
+ const isImpersonating = projectAssumeRole?.projectId === projectId;
+ const impersonation = isImpersonating
+ ? {
+ actorId: projectAssumeRole?.actorId,
+ actorType: projectAssumeRole?.actorType,
+ actorName: "",
+ actorEmail: ""
+ }
+ : undefined;
+ if (impersonation?.actorType === ActorType.IDENTITY) {
+ const identityDetails = await identityDAL.findById(impersonation.actorId);
+ impersonation.actorName = identityDetails.name;
Greptile
greptile
logic: Missing error handling if identity lookup fails
suggested fix
const identityDetails = await identityDAL.findById(impersonation.actorId);
+ impersonation.actorName = identityDetails?.name ?? "";
diff block
}
for _, obj := range objectNames {
- // Constructing the request url.
url, err := c.constructMetadataURL(obj)
if err != nil {
return nil, err
}
- resp, err := c.Client.Get(ctx, url.String())
+ httpResp, body, err := c.Client.HTTPClient.Get(ctx, url.String())
if err != nil {
+ logging.Logger(ctx).Info("failed to get metadata", "object", obj, "body", body, "err", err.Error())
runFallback(c.Module.ID, obj, &metadataResult)
continue
}
+ defer httpResp.Body.Close()
+
+ resp, err := common.ParseJSONResponse(httpResp, body)
+ if err != nil {
+ logging.Logger(ctx).Info("failed to parse metadata response", "object", obj, "body", body, "err", err.Error())
Greptile
greptile
logic: Missing error handling after continue - should add to metadataResult.Errors
suggested fix
logging.Logger(ctx).Info("failed to parse metadata response", "object", obj, "body", body, "err", err.Error())
+ metadataResult.Errors[obj] = err
diff block
+import { useEffect } from "react";
+import { Controller, useForm } from "react-hook-form";
+import { BsMicrosoftTeams } from "react-icons/bs";
+import { zodResolver } from "@hookform/resolvers/zod";
+import { z } from "zod";
+
+import { createNotification } from "@app/components/notifications";
+import {
+ Accordion,
+ AccordionContent,
+ AccordionItem,
+ AccordionTrigger,
+ Button,
+ FormControl,
+ Input
+} from "@app/components/v2";
+import { useToggle } from "@app/hooks";
+import { useUpdateServerConfig } from "@app/hooks/api";
+import { AdminIntegrationsConfig } from "@app/hooks/api/admin/types";
+
+const microsoftTeamsFormSchema = z.object({
+ appId: z.string(),
+ clientSecret: z.string(),
+ botId: z.string()
+});
+
+type TMicrosoftTeamsForm = z.infer<typeof microsoftTeamsFormSchema>;
+
+type Props = {
+ adminIntegrationsConfig?: AdminIntegrationsConfig;
+};
+
+export const MicrosoftTeamsIntegrationForm = ({ adminIntegrationsConfig }: Props) => {
+ const { mutateAsync: updateAdminServerConfig } = useUpdateServerConfig();
+ const [isMicrosoftTeamsAppIdFocused, setIsMicrosoftTeamsAppIdFocused] = useToggle();
+ const [isMicrosoftTeamsClientSecretFocused, setIsMicrosoftTeamsClientSecretFocused] = useToggle();
+ const [isMicrosoftTeamsBotIdFocused, setIsMicrosoftTeamsBotIdFocused] = useToggle();
+ const {
+ control,
+ handleSubmit,
+ setValue,
+ formState: { isSubmitting, isDirty }
+ } = useForm<TMicrosoftTeamsForm>({
+ resolver: zodResolver(microsoftTeamsFormSchema)
+ });
+
+ const onSubmit = async (data: TMicrosoftTeamsForm) => {
+ await updateAdminServerConfig({
+ microsoftTeamsAppId: data.appId,
+ microsoftTeamsClientSecret: data.clientSecret,
+ microsoftTeamsBotId: data.botId
+ });
+
+ createNotification({
+ text: "Updated admin Microsoft Teams configuration",
+ type: "success"
+ });
+ };
Greptile
greptile
logic: Missing error handling for updateAdminServerConfig failure - should show error notification if update fails
suggested fix
const onSubmit = async (data: TMicrosoftTeamsForm) => {
+ try {
await updateAdminServerConfig({
microsoftTeamsAppId: data.appId,
microsoftTeamsClientSecret: data.clientSecret,
microsoftTeamsBotId: data.botId
});
createNotification({
text: "Updated admin Microsoft Teams configuration",
type: "success"
});
+ } catch (error) {
createNotification({
+ text: "Failed to update Microsoft Teams configuration",
+ type: "error"
});
+ }
};
diff block
+import { Client } from "@modelcontextprotocol/sdk/client/index.js";
+import { StdioClientTransport, StdioServerParameters } from "@modelcontextprotocol/sdk/client/stdio.js";
+import { McpServerConfig, McpServerTool, McpToolInfo } from "./types";
+import { execSync } from "child_process";
+import os from "os";
+import { OllamaApiChatMessageToolCall, OllamaApiTool } from "../ollama/types";
+import { ConvertMcpToolsToOllamaTools } from "./utils";
+
+// Mcp Client with multiple Mcp Server support.
+export class McpClientMultiServer {
+ /* Mcp Client Map where the key is the Mcp Server Name. */
+ private _client = new Map<string, McpClient>();
+
+ /* Tools Map where key is the modified tool name and the value is the Mcp Server name. */
+ private _clientToolsFunctionNames = new Map<string, string>();
+
+ /* Tools Cache where the key is the Mcp Server Name. Tools are saved with the modified names. */
+ private _cacheTools = new Map<string, McpServerTool[]>();
+ private _cacheToolsOllama = new Map<string, OllamaApiTool[]>();
+
+ /**
+ * @param config - Mcp Config.
+ * @param initEnvsPath - Set to false for skip envs and path initialization.
+ */
+ constructor(config: McpServerConfig, initEnvsPath = true) {
+ /* Get Mcp Server names. */
+ const serverNames = Object.keys(config.mcpServers);
+ if (serverNames.length === 0) throw "Mcp Config do not contain any server configuration.";
+
+ /* Initialize a client for each Mcp Server. */
+ serverNames.forEach((server) => {
+ this._client.set(server, new McpClient(config.mcpServers[server], initEnvsPath));
+ if (!initEnvsPath) initEnvsPath = false;
+ });
+ }
+
+ /**
+ * Rename Tools Function Name avoiding name collision between Mcp Server functions names.
+ * All news names are saved on _clientToolsMap.
+ * @param server - Mcp Server name.
+ * @param tools - Tools array.
+ * @returns Tools with the new names.
+ */
+ private _renameToolsFunctionName(server: string, tools: McpServerTool[]): McpServerTool[] {
+ return tools.map((tool) => {
+ const name = `${server}_${tool.name}`;
+ tool.name = name;
+ if (!this._clientToolsFunctionNames.has(name)) this._clientToolsFunctionNames.set(name, server);
+ return tool;
+ });
+ }
+
+ /**
+ * Restore Tools Function Name to the original one.
+ * @param server - Mcp Server name.
+ * @param tools - Tools array.
+ * @returns Tools with the original names.
+ */
+ private _restoreToolsFunctionName(
+ server: string,
+ tools: OllamaApiChatMessageToolCall[]
+ ): OllamaApiChatMessageToolCall[] {
+ return tools.map((tool) => {
+ const name = tool.function.name.replace(new RegExp(`^${server}_`), "");
+ tool.function.name = name;
+ return tool;
+ });
+ }
+
+ /**
+ * Get Tools from configured Mcp Server.
+ * @param use_cache - disable tools cache.
+ * @param server - mcp server names. Use only if you need to limit tools retrieve.
+ */
+ async GetTools(use_cache = true, server: string[] = [...this._client.keys()]): Promise<McpServerTool[]> {
+ let tools: McpServerTool[] = [];
+
+ /* Get Tools from Mcp Server. */
+ const tasks = await Promise.all(
+ server.map(async (name): Promise<McpServerTool[] | undefined> => {
+ /* Get Tools from cache if defined and cache is enabled */
+ if (use_cache && this._cacheTools.has(name)) return this._cacheTools.get(name);
+
+ /* Get Tools */
+ if (this._client.has(name)) {
+ try {
+ const tools = await this._client
+ .get(name)!
+ .GetTools(false)
+ .then((t) => this._renameToolsFunctionName(name, t));
+
+ if (tools.length > 0) {
+ /* Save Tools on cache */
+ this._cacheTools.set(name, tools);
+ return tools;
+ }
+ } catch (e) {
+ console.error(`[ERROR] Mcp Client - Server Name: "${name}" - Error: "${e}"`);
+ }
+ }
+
+ /* Return undefined if Mcp Server name is not configured */
+ return undefined;
+ })
+ );
+
+ // Concat all defined result.
+ tasks.forEach((task) => {
+ if (task) tools = tools.concat(task);
+ });
+
+ return tools;
+ }
+
+ /**
+ * Get Tools from configured Mcp Server in Ollama format.
+ * @param use_cache - disable tools cache.
+ * @param server - mcp server names. Use only if you need to limit tools retrieve.
+ */
+ async GetToolsOllama(use_cache = true, server: string[] = [...this._client.keys()]): Promise<OllamaApiTool[]> {
+ let tools: OllamaApiTool[] = [];
+
+ /* Get Tools from Mcp Server. */
+ const tasks = await Promise.all(
+ server.map(async (name): Promise<OllamaApiTool[] | undefined> => {
+ /* Get Tools from cacheToolsOllama if defined and cache is enabled */
+ if (use_cache && this._cacheToolsOllama.has(name)) return this._cacheToolsOllama.get(name)!;
+
+ /* Get Tools from Mcp Server and save on cache */
+ const t = ConvertMcpToolsToOllamaTools(await this.GetTools(use_cache, [name]));
+ if (t.length > 0) {
+ this._cacheToolsOllama.set(name, t);
+ return t;
+ }
+
+ return undefined;
+ })
+ );
+
+ /* Concat all defined result. */
+ tasks.forEach((task) => {
+ if (task) tools = tools.concat(task);
+ });
+
+ return tools;
+ }
+
+ /**
+ * Get Tools info.
+ * @param tools - Tools in Ollama Format.
+ */
+ GetToolsInfoForOllama(tools: OllamaApiChatMessageToolCall[]): McpToolInfo[] {
+ const o: McpToolInfo[] = [];
+
+ tools.forEach((t) => {
+ /* Get Mcp Server Name */
+ const server = this._clientToolsFunctionNames.get(t.function.name);
+ if (!server) return;
+
+ /* Get Original function Name */
+ const name = t.function.name.replace(new RegExp(`^${server}_`), "");
+
+ o.push({
+ server: server,
+ function: name,
+ arguments: t.function.arguments,
+ });
+ });
+
+ return o;
+ }
+
+ /**
+ * Call Tools on Mcp Server.
+ * @param tools - Tools Call on Ollama Format,
+ * @returns Tools call results array.
+ */
+ async CallToolsForOllama(tools: OllamaApiChatMessageToolCall[]): Promise<any[]> {
+ let results: any[] = [];
+
+ /* Aggregate tool call by mcp server */
+ const toolsMap = new Map<string, OllamaApiChatMessageToolCall[]>();
+ tools.forEach((tool) => {
+ /* Get Mcp Server Name */
+ const name = this._clientToolsFunctionNames.get(tool.function.name);
+ if (!name) return;
+
+ /* Add tool to toolsMap */
+ toolsMap.has(name) ? toolsMap.get(name)!.push(tool) : toolsMap.set(name, [tool]);
+ });
+
+ const tasks = await Promise.all(
+ [...toolsMap.keys()].map(async (name): Promise<any[] | undefined> => {
+ /* Get Mcp Client and Tools */
+ const client = this._client.get(name)!;
+ const tools = this._restoreToolsFunctionName(name, toolsMap.get(name)!);
+
+ /* Call Tools */
+ try {
+ return await client.CallToolsForOllama(tools);
+ } catch (e) {
+ console.error(`[ERROR] Mcp Client - Server Name: "${name}" - Error: "${e}"`);
+ }
+
+ return undefined;
+ })
+ );
+
+ /* Concat all defined result. */
+ tasks.forEach((task) => {
+ if (task) results = results.concat(task);
+ });
+
+ return results;
+ }
+}
+
+/* Mcp Client with single Mcp Server support. */
+export class McpClient {
+ private _client: Client;
+ private _connected = false;
+
+ private _config: StdioServerParameters;
+
+ private _tools: McpServerTool[] | undefined;
+ private _ollamaTools: OllamaApiTool[] | undefined;
+
+ /**
+ * @param config - Mcp Config, only local server are supported.
+ * @param initEnvsPath - Set to false for skip envs and path initialization.
+ */
+ constructor(config: StdioServerParameters, initEnvsPath = true) {
+ /* Mcp Client Config. */
+ this._client = new Client(
+ {
+ name: "raycast-extension-ollama",
+ version: "1.0.0",
+ },
+ {
+ capabilities: {
+ tools: {},
+ },
+ }
+ );
+
+ this._client.onclose = () => (this._connected = false);
+
+ /* Initiliazie $PATH and global ENVS. */
+ if (initEnvsPath) {
+ this._initEnvs();
+ this._initPaths();
+ }
+
+ /* Save Stdio Config */
+ this._config = config;
+ }
+
+ /*
+ * Add user defined envs on process.env.
+ */
+ private _initEnvs(): void {
+ const shell = os.userInfo().shell || "/bin/sh";
+ try {
+ execSync(`LC_ALL=en_US.UTF-8 ${shell} -L -i -c 'printenv'`, { encoding: "utf8" })
+ .split("\n")
+ .forEach((l) => {
+ const [k, v] = l.split("=");
+ if (k && v) {
+ process.env[k] = v;
+ }
+ });
+ } catch (e) {
+ console.error("Error retrieving user shell envs:", e);
+ }
+ }
+
+ /*
+ * Add user difined paths on process.env.PATH.
+ */
+ private _initPaths(): void {
+ const shell = os.userInfo().shell || "/bin/sh";
+ try {
+ const path = execSync(`${shell} -l -c 'echo $PATH'`).toString().trim();
+ process.env.PATH = path;
+ } catch (e) {
+ console.error("Error retrieving user shell paths:", e);
+ }
+ }
+
+ /**
+ * Mcp Client Connect to the server.
+ */
+ private async _connect(): Promise<void> {
+ if (!this._connected) {
+ await this._client.connect(new StdioClientTransport(this._config));
+ this._connected = true;
+ }
+ }
+
+ /**
+ * Get Available Tools from Mcp Server.
+ * @param use_cache - disable tools cache.
+ */
+ async GetTools(use_cache = true): Promise<McpServerTool[]> {
+ if (use_cache && this._tools) return this._tools;
+
+ await this._connect();
+
+ try {
+ const tools = await this._client.listTools();
+ this._tools = tools.tools.map((tool) => {
+ return {
+ name: tool.name,
+ description: tool.description,
+ inputSchema: tool.inputSchema,
+ };
+ });
+ return this._tools;
+ } finally {
+ await this._client.close();
+ }
+ }
+
+ /**
+ * Get Available Tools from Mcp Server in Ollama Tools Format.
+ * @param use_cache - disable tools cache.
+ */
+ async GetToolsForOllama(use_cache = true): Promise<OllamaApiTool[]> {
+ if (use_cache && this._ollamaTools) return this._ollamaTools;
+ const tools = await this.GetTools(use_cache);
+ this._ollamaTools = ConvertMcpToolsToOllamaTools(tools);
+ return this._ollamaTools;
+ }
+
+ /**
+ * Call Tools on Mcp Server.
+ * @param tools - Ollama Message Tool Calls.
+ */
+ async CallToolsForOllama(tools: OllamaApiChatMessageToolCall[]): Promise<any[]> {
+ await this._connect();
+
+ try {
+ return await Promise.all(
+ tools.map(async (tool): Promise<any> => {
+ const result = await this._client.callTool(tool.function);
+ return result.content;
+ })
+ );
+ } finally {
+ await this._client.close();
+ }
Greptile
greptile
logic: Missing error handling for individual tool calls - errors in one tool shouldn't fail the entire batch
diff block
+import { ActionPanel, Action, Icon, List, LocalStorage, Form, useNavigation } from "@raycast/api";
+import React, { useEffect, useState } from "react";
+import { KDEConnect, KDEDevice } from "./device";
+import { SendType, SendTypeAllCases, appExists, startApp } from "./connector";
+import { StorageKey } from "./storage";
+import GetKDEConnect from "./getKDEConnect";
+
+const connect = new KDEConnect();
+
+export default function Command() {
+ const [loading, setLoading] = useState<boolean>(true);
+ const [appOk, setAppOk] = useState<boolean | undefined>();
+ const [devices, setDevices] = useState<KDEDevice[]>([]);
+ const [favouriteDevice, setFavouriteDevice] = useState<string | undefined>();
+
+ const { push } = useNavigation();
+
+ const refreshDevices = async () => {
+ setLoading(true);
+ await startApp();
+ console.log("App ready");
+ const discoveredDevices = await connect.listDevices();
+ setDevices(discoveredDevices);
+ setLoading(false);
+ };
+
+ useEffect(() => {
+ setAppOk(appExists());
+ if (!appOk) {
+ return;
+ }
+ }, []);
+
+ useEffect(() => {
+ if (appOk) {
+ LocalStorage.getItem(StorageKey.pairedDevices).then((value) => {
+ if (loading && value) {
+ const devices = JSON.parse(value as string) as KDEDevice[];
+ setDevices(devices);
+ }
+ });
+ LocalStorage.getItem(StorageKey.favouriteDevice).then((value) => {
+ if (loading && value) {
+ const device = value as string;
+ setFavouriteDevice(device);
+ }
+ });
+ refreshDevices();
+ }
+ }, [appOk]);
+
+ const deviceStatus = (item: KDEDevice): string => {
+ return item.paired ? (item.reachable ? "Connected" : "Unreachable") : "Not Paired";
+ };
+
+ const deviceStatusIcon = (item: KDEDevice): Icon => {
+ return item.paired ? (item.reachable ? Icon.Bolt : Icon.BoltDisabled) : Icon.Link;
+ };
+
+ const setFavourite = (device?: KDEDevice) => {
+ if (!device) {
+ LocalStorage.removeItem(StorageKey.favouriteDevice);
+ } else {
+ LocalStorage.setItem(StorageKey.favouriteDevice, device.id);
+ }
+ setFavouriteDevice(device?.id);
+ };
+
+ if (appOk === false) {
+ return <GetKDEConnect />;
+ }
+
+ return (
+ <List isLoading={loading}>
+ <List.Section title="Paired Devices">
+ {devices
+ .filter((entry) => entry.paired)
+ .map((item) => (
+ <List.Item
+ key={item.id}
+ icon={Icon.Mobile}
+ title={item.name}
+ subtitle={item.id}
+ accessories={(item.id === favouriteDevice ? [{ icon: Icon.Star, text: "Favourite" }] : []).concat([
+ { icon: deviceStatusIcon(item), text: deviceStatus(item) },
+ ])}
+ actions={
+ <ActionPanel>
+ <Action
+ title="Send…"
+ icon={Icon.ArrowUpCircleFilled}
+ onAction={() => {
+ connect.deviceID = item.id;
+ push(<DeviceActions device={item} connect={connect} />);
+ }}
+ />
+ {item.id === favouriteDevice ? (
+ <Action
+ title="Unset Favourite"
+ icon={Icon.StarDisabled}
+ onAction={() => {
+ setFavourite(undefined);
+ }}
+ />
+ ) : (
+ <Action
+ title="Set Favourite"
+ icon={Icon.Star}
+ onAction={() => {
+ setFavourite(item);
+ }}
+ />
+ )}
+ <Action
+ title="Ping"
+ icon={Icon.Network}
+ onAction={() => {
+ connect.ping(item.id);
+ }}
+ />
+ <Action
+ title="Unpair"
+ icon={Icon.Trash}
+ onAction={() => {
+ connect.unpairDevice(item.id).then(() => {
+ refreshDevices();
+ });
+ }}
+ />
+ </ActionPanel>
+ }
+ />
+ ))}
+ </List.Section>
+ <List.Section title="Discovered Devices">
+ {devices
+ .filter((entry) => !entry.paired)
+ .map((item) => (
+ <List.Item
+ key={item.id}
+ icon={Icon.Mobile}
+ title={item.name}
+ subtitle={item.id}
+ accessories={(item.id === favouriteDevice ? [{ icon: Icon.Star, text: "Favourite" }] : []).concat([
+ { icon: deviceStatusIcon(item), text: deviceStatus(item) },
+ ])}
+ actions={
+ <ActionPanel>
+ <Action
+ title="Pair Device"
+ icon={Icon.Link}
+ onAction={() => {
+ setLoading(true);
+ connect.pairDevice(item.id).then(() => {
+ setLoading(false);
+ });
+ refreshDevices();
+ }}
+ />
+ </ActionPanel>
+ }
+ />
+ ))}
+ </List.Section>
+ </List>
+ );
+}
+
+// TODO: implement all types
+function DeviceActions(props: { device: KDEDevice; connect: KDEConnect }) {
+ const { pop } = useNavigation();
+ const [sendType, setSendType] = useState<SendType>(SendType.Text);
+ const setSendTypeCast = (string: string) => {
+ setSendType(string as SendType);
+ };
+
+ interface SendData {
+ destination?: string;
+ content: string;
+ }
+
+ const textField = () => {
+ switch (sendType) {
+ case SendType.SMS:
+ return (
+ <React.Fragment>
+ <Form.TextField id="destination" title="Phone Number" />
+ <Form.TextArea id="content" title="Content" />
+ </React.Fragment>
+ );
+
+ default:
+ return <Form.TextArea id="content" title="Content" />;
+ }
+ };
+
+ const submitAction = (values: SendData) => {
+ switch (sendType) {
+ case SendType.SMS:
+ props.connect.sendSMS(values.destination as string, values.content).then(pop);
+ break;
+
+ case SendType.URL:
+ props.connect.share(values.content).then(pop);
+ break;
+
+ default:
+ props.connect.sendText(values.content).then(pop);
+ }
+ };
Greptile
greptile
style: Missing error handling for failed operations. Consider using try/catch and showFailureToast from @raycast/utils.
suggested fix
+ const submitAction = async (values: SendData) => {
+ try {
switch (sendType) {
case SendType.SMS:
+ await props.connect.sendSMS(values.destination as string, values.content);
break;
case SendType.URL:
+ await props.connect.share(values.content);
break;
default:
+ await props.connect.sendText(values.content);
}
+ pop();
+ } catch (error) {
+ showFailureToast(error, { title: "Failed to send content" });
}
};
diff block
+import { ActionPanel, Action, Icon, List, LocalStorage, Form, useNavigation } from "@raycast/api";
+import React, { useEffect, useState } from "react";
+import { KDEConnect, KDEDevice } from "./device";
+import { SendType, SendTypeAllCases, appExists, startApp } from "./connector";
+import { StorageKey } from "./storage";
+import GetKDEConnect from "./getKDEConnect";
+
+const connect = new KDEConnect();
+
+export default function Command() {
+ const [loading, setLoading] = useState<boolean>(true);
+ const [appOk, setAppOk] = useState<boolean | undefined>();
+ const [devices, setDevices] = useState<KDEDevice[]>([]);
+ const [favouriteDevice, setFavouriteDevice] = useState<string | undefined>();
+
+ const { push } = useNavigation();
+
+ const refreshDevices = async () => {
+ setLoading(true);
+ await startApp();
+ console.log("App ready");
+ const discoveredDevices = await connect.listDevices();
+ setDevices(discoveredDevices);
+ setLoading(false);
+ };
+
+ useEffect(() => {
+ setAppOk(appExists());
+ if (!appOk) {
+ return;
+ }
+ }, []);
+
+ useEffect(() => {
+ if (appOk) {
+ LocalStorage.getItem(StorageKey.pairedDevices).then((value) => {
+ if (loading && value) {
+ const devices = JSON.parse(value as string) as KDEDevice[];
+ setDevices(devices);
+ }
+ });
+ LocalStorage.getItem(StorageKey.favouriteDevice).then((value) => {
+ if (loading && value) {
+ const device = value as string;
+ setFavouriteDevice(device);
+ }
+ });
+ refreshDevices();
+ }
+ }, [appOk]);
+
+ const deviceStatus = (item: KDEDevice): string => {
+ return item.paired ? (item.reachable ? "Connected" : "Unreachable") : "Not Paired";
+ };
+
+ const deviceStatusIcon = (item: KDEDevice): Icon => {
+ return item.paired ? (item.reachable ? Icon.Bolt : Icon.BoltDisabled) : Icon.Link;
+ };
+
+ const setFavourite = (device?: KDEDevice) => {
+ if (!device) {
+ LocalStorage.removeItem(StorageKey.favouriteDevice);
+ } else {
+ LocalStorage.setItem(StorageKey.favouriteDevice, device.id);
+ }
+ setFavouriteDevice(device?.id);
+ };
+
+ if (appOk === false) {
+ return <GetKDEConnect />;
+ }
+
+ return (
+ <List isLoading={loading}>
+ <List.Section title="Paired Devices">
+ {devices
+ .filter((entry) => entry.paired)
+ .map((item) => (
+ <List.Item
+ key={item.id}
+ icon={Icon.Mobile}
+ title={item.name}
+ subtitle={item.id}
+ accessories={(item.id === favouriteDevice ? [{ icon: Icon.Star, text: "Favourite" }] : []).concat([
+ { icon: deviceStatusIcon(item), text: deviceStatus(item) },
+ ])}
+ actions={
+ <ActionPanel>
+ <Action
+ title="Send…"
+ icon={Icon.ArrowUpCircleFilled}
+ onAction={() => {
+ connect.deviceID = item.id;
+ push(<DeviceActions device={item} connect={connect} />);
+ }}
+ />
+ {item.id === favouriteDevice ? (
+ <Action
+ title="Unset Favourite"
+ icon={Icon.StarDisabled}
+ onAction={() => {
+ setFavourite(undefined);
+ }}
+ />
+ ) : (
+ <Action
+ title="Set Favourite"
+ icon={Icon.Star}
+ onAction={() => {
+ setFavourite(item);
+ }}
+ />
+ )}
+ <Action
+ title="Ping"
+ icon={Icon.Network}
+ onAction={() => {
+ connect.ping(item.id);
+ }}
Greptile
greptile
style: Missing error handling for ping operation. Consider wrapping in try/catch with showFailureToast
suggested fix
+ try {
+ await connect.ping(item.id);
+ } catch (error) {
+ showFailureToast(error, { title: "Failed to ping device" });
}
}}
diff block
+import { Form, LocalStorage, ActionPanel, Action } from "@raycast/api";
+import { TRIGGER_FILTERS_KEY } from "../utils/constants";
+import { useEffect, useState } from "react";
+import { getTags } from "../utils/n8n-api-utils";
+
+export default function TriggerFilterForm() {
+ const [selectedTags, setSelectedTags] = useState<string[]>([]);
+ const [availableTags, setAvailableTags] = useState<string[]>([]);
+
+ useEffect(() => {
+ async function loadFilters() {
+ const storedTags = await LocalStorage.getItem<string[]>(TRIGGER_FILTERS_KEY);
+ const tags = await getTags();
+ setAvailableTags(tags);
+ setSelectedTags(storedTags || []);
+ }
+ loadFilters();
+ }, []);
Greptile
greptile
logic: Missing error handling for API and storage operations.
diff block
+import { Action, ActionPanel, Color, Icon, LocalStorage, showHUD } from "@raycast/api";
+import { Dispatch, SetStateAction } from "react";
+import { ActionOpenPreferences } from "./action-open-preferences";
+import { Workflow } from "../types/types";
+import { executeWorkFlowsCLI, triggerWorkFlowsCLI } from "../utils/n8n-cli-utils";
+import { LocalStorageKey } from "../utils/constants";
+
+export function ActionOnWorkflow(props: {
+ workflow: Workflow;
+ setRefresh: Dispatch<SetStateAction<number>>;
+ setRefreshDetail: Dispatch<SetStateAction<number>>;
+ showDetail: boolean;
+}) {
+ const { workflow, setRefresh, setRefreshDetail, showDetail } = props;
+ return (
+ <ActionPanel>
+ <Action
+ icon={{ source: "list-icon.svg", tintColor: Color.PrimaryText }}
+ title={"Execute Workflow"}
+ onAction={() => {
+ showHUD("Workflow executed").then();
+ executeWorkFlowsCLI(workflow.id).then(async (r) => {
+ await showHUD(r);
+ });
Greptile
greptile
logic: Missing error handling for executeWorkFlowsCLI — should use showFailureToast from @raycast/utils for better error feedback.
suggested fix
executeWorkFlowsCLI(workflow.id).then(async (r) => {
await showHUD(r);
+ }).catch((error) => showFailureToast(error, { title: "Failed to execute workflow" }));
diff block
+import { Color, getPreferenceValues, List, showToast, Toast, Action, ActionPanel, Icon, LocalStorage } from "@raycast/api";
+import { useState, useEffect } from "react";
+import { ActionOnWorkflow } from "./components/action-on-workflow"; // Re-use the same actions
+import { EmptyView } from "./components/empty-view";
+import { DetailView } from "./components/detail-view";
+import { getAllWorkflowsAPI, triggerWebhook } from "./utils/n8n-api-utils"; // Import triggerWebhook
+import { Workflow } from "./types/types";
+import { getIsShowDetail } from "./hooks/hooks";
+import { getWebhookDetails } from "./utils/workflow-utils"; // Removed getWebhookUrl import
+import SaveCommandForm from "./components/SaveCommandForm";
+import { ActionOpenPreferences } from "./components/action-open-preferences";
+import TriggerFilterForm from "./components/TriggerFilterForm";
+import ResetStorageForm from "./components/ResetStorageForm";
+import { resetAllStorageData } from "./utils/reset-utils";
+import { TRIGGER_FILTERS_KEY, FILTER_APPLIED_INDICATOR } from "./utils/constants";
+
+// Define the preferences interface matching package.json
+interface Preferences {
+ instanceUrl: string;
+ apiKey: string;
+}
+
+// Helper to get workflow URL (moved outside component)
+function getWorkflowUrl(instanceUrl: string, workflowId: number): string {
+ const baseUrl = instanceUrl.endsWith('/') ? instanceUrl.slice(0, -1) : instanceUrl;
+ return `${baseUrl}/workflow/${workflowId}`;
+}
+
+// Helper to get webhook URL (moved outside component)
+function getWebhookUrl(instanceUrl: string, path: string): string {
+ const baseUrl = instanceUrl.endsWith('/') ? instanceUrl.slice(0, -1) : instanceUrl;
+ const webhookPath = path.startsWith('/') ? path : `/${path}`;
+ return `${baseUrl}/webhook${webhookPath}`; // Assumes production URL structure
+}
+
+
+export default function SearchWebhookWorkflowsCommand() {
+ const { instanceUrl, apiKey } = getPreferenceValues<Preferences>();
+
+ const [filteredWorkflows, setFilteredWorkflows] = useState<Workflow[]>([]); // Workflows after filtering
+ const [loading, setLoading] = useState<boolean>(true);
+ const [error, setError] = useState<string | null>(null);
+ const [refresh, setRefresh] = useState<number>(0); // To trigger manual refresh
+ const [refreshDetail, setRefreshDetail] = useState<number>(0);
+ const { showDetail } = getIsShowDetail(refreshDetail);
+ const [activeFilters, setActiveFilters] = useState<string[]>([]); // Store loaded filters
+
+ useEffect(() => {
+ async function loadAndFilterWorkflows() {
+ if (!instanceUrl || !apiKey) {
+ setError("Missing API Credentials");
+ setLoading(false);
+ await showToast({
+ style: Toast.Style.Failure,
+ title: "Missing Preferences",
+ message: "Please set your n8n Instance URL and API Key in the command preferences.",
+ });
+ return;
+ }
+
+ setLoading(true);
+ setError(null);
+ try {
+ // 1. Load saved filters with validation
+ const storedFilters = await LocalStorage.getItem<string>(TRIGGER_FILTERS_KEY);
+ let currentFilters: string[] = [];
+
+ if (storedFilters) {
+ try {
+ const parsedFilters = JSON.parse(storedFilters);
+
+ // Validate filters are in the expected format
+ if (Array.isArray(parsedFilters)) {
+ // Filter out non-string values
+ currentFilters = parsedFilters.filter(item => typeof item === 'string');
+
+ // If we found invalid items, save the cleaned version back
+ if (currentFilters.length !== parsedFilters.length) {
+ console.warn(`Found ${parsedFilters.length - currentFilters.length} invalid filter items, cleaning up`);
+ await LocalStorage.setItem(TRIGGER_FILTERS_KEY, JSON.stringify(currentFilters));
+ }
+ } else {
+ console.warn("Saved filters are not in array format, resetting");
+ await LocalStorage.removeItem(TRIGGER_FILTERS_KEY);
+ }
+ } catch (parseError) {
+ console.error("Failed to parse saved filters:", parseError);
+ await LocalStorage.removeItem(TRIGGER_FILTERS_KEY);
+ }
+ }
+
+ setActiveFilters(currentFilters); // Update state for UI indicator
+
+ // 2. Fetch all workflows
+ const fetchedWorkflows = await getAllWorkflowsAPI();
+
+ // 3. First filter for webhook triggers
+ const webhookWorkflows = fetchedWorkflows.filter(wf => getWebhookDetails(wf) !== null);
+
+ // 4. Then apply tag filters if any
+ if (currentFilters.length > 0) {
+ const workflowsMatchingFilters = webhookWorkflows.filter(wf =>
+ wf.tags?.some(tag => currentFilters.includes(tag.name)) ?? false
+ );
+ setFilteredWorkflows(workflowsMatchingFilters);
+ } else {
+ // No filters set, show all webhook workflows
+ setFilteredWorkflows(webhookWorkflows);
+ }
+
+ } catch (e) {
+ setError(e instanceof Error ? e.message : "Failed to fetch workflows or apply filters");
+ } finally {
+ setLoading(false);
+ }
+ }
+
+ loadAndFilterWorkflows();
+ }, [refresh, instanceUrl, apiKey]); // Re-run on refresh or credential change
+
+ // Separate active and inactive based on the *filtered* list
+ const activeWorkflows = filteredWorkflows.filter(w => w.active);
+ const inactiveWorkflows = filteredWorkflows.filter(w => !w.active);
+
+ const listTitle = activeFilters.length > 0 ? `Triggerable Workflows${FILTER_APPLIED_INDICATOR}` : "Triggerable Workflows";
+
+ if (error && !loading) {
+ return (
+ <List>
+ {/* Use the updated EmptyView structure for errors */}
+ <EmptyView
+ title="Failed to Load Workflows"
+ extensionPreferences={false} // Don't show default pref action
+ actions={ // Pass custom actions
+ <ActionPanel>
+ <ActionOpenPreferences />
+ <Action title="Retry" icon={Icon.Repeat} onAction={() => setRefresh(Date.now())} />
+ <Action.Push
+ title="Set Trigger Filters..."
+ icon={Icon.Filter}
+ target={<TriggerFilterForm />}
+ shortcut={{ modifiers: ["cmd", "shift"], key: "f" }}
+ />
+ </ActionPanel>
+ }
+ />
+ </List>
+ );
+ }
+
+ return (
+ <List
+ isLoading={loading}
+ isShowingDetail={showDetail && filteredWorkflows.length > 0}
+ searchBarPlaceholder={"Search triggerable webhook workflows"}
+ navigationTitle={listTitle} // Show filter indicator in title
+ searchBarAccessory={
+ <List.Dropdown
+ tooltip="Actions"
+ storeValue={false}
+ onChange={(value) => {
+ if (value === "reset") {
+ (async () => {
+ try {
+ await resetAllStorageData();
+ setRefresh(Date.now()); // Refresh after reset
+ } catch (error) {
+ console.error("Failed to reset storage:", error);
+ }
Greptile
greptile
logic: Missing error handling toast for storage reset failure
suggested fix
} catch (error) {
console.error("Failed to reset storage:", error);
await showToast({
style: Toast.Style.Failure,
+ title: "Failed to Reset Storage",
+ message: error instanceof Error ? error.message : String(error)
});
}
diff block
+const { loadEnvConfig } = require("@next/env");
+const git = require("isomorphic-git")
+const http = require("isomorphic-git/http/node")
+const fs = require("fs")
+const path = require("path")
+const os = require('os')
+
+loadEnvConfig(process.cwd());
+
+const token = process.env.GH_TOKEN;
+const url = process.env.DATA_REPOSITORY;
+
+if (!url) {
+ throw new Error("'DATA_REPOSITORY' must be defined as environment variable.");
+}
+
+if (!token) {
+ throw new Error("'GH_TOKEN' must be defined as environment variable.");
+}
+
+
+function getContentPath() {
+ const contentDir = '.content';
+ if (process.env.VERCEL) {
+ return path.join(os.tmpdir(), contentDir);
+ }
+
+ return path.join(process.cwd(), contentDir);
+}
+
+const auth = { username: "x-access-token", password: token };
+const dest = getContentPath();
+
+async function main() {
+ await fs.promises.mkdir(dest, { recursive: true });
+
+ await git.clone({
+ onAuth: () => auth,
+ fs,
+ http,
+ url,
+ dir: dest,
+ singleBranch: true,
+ });
+}
+
+main()
Greptile
greptile
logic: Missing error handling for main(). Unhandled promise rejection could crash the process.
suggested fix
+main().catch(err => {
+ console.error('Failed to clone repository:', err);
+ process.exit(1);
});
diff block
+import DOMPurify from "isomorphic-dompurify";
+
+/* -------------------------------------------------------------------------- */
+/* String Utilities */
+/* -------------------------------------------------------------------------- */
+
+/**
+ * Inserts spaces between camelCase words.
+ *
+ * @param text - Input camelCase string.
+ * @returns String with spaces inserted.
+ *
+ * @example
+ * addSpaceInCamelCase("camelCase") // => "camel Case"
+ */
+export const addSpaceInCamelCase = (text: string): string => {
+ if (!text) return "";
+ return text.replace(/([a-z])([A-Z])/g, "$1 $2");
+};
+
+/**
+ * Replaces underscores with spaces in snake_case strings.
+ *
+ * @param text - Input snake_case string.
+ * @returns String with underscores replaced.
+ *
+ * @example
+ * replaceUnderscores("snake_case_string") // => "snake case string"
+ */
+export const replaceUnderscores = (text: string): string => {
+ return text.replace(/_/g, " ");
+};
+
+/**
+ * Truncates a string to a maximum length, appending ellipsis if needed.
+ *
+ * @param text - Input string.
+ * @param maxLength - Maximum allowed length.
+ * @returns Truncated string.
+ *
+ * @example
+ * truncateText("Hello world!", 5) // => "Hello..."
+ */
+export const truncateText = (text: string, maxLength: number): string => {
+ if (!text) return "";
+ return text.length > maxLength ? `${text.substring(0, maxLength)}...` : text;
+};
+
+/**
+ * Randomly shuffles characters in a string.
+ *
+ * @param text - Input string.
+ * @returns Shuffled string.
+ *
+ * @example
+ * shuffleString("abc") // => "cab" or "bca" etc.
+ */
+export const shuffleString = (text: string): string => {
+ return text
+ .split("")
+ .sort(() => Math.random() - 0.5)
+ .join("");
+};
+
+/**
+ * Returns the initials of a string (first letter of first and second word).
+ *
+ * @param text - Input string.
+ * @returns Initials.
+ *
+ * @example
+ * getInitials("John Doe") // => "JD"
+ */
+export const getInitials = (text: string): string => {
+ const words = text.trim().split(" ");
+ return words.length > 1 ? `${words[0][0]}${words[1][0]}` : `${words[0][0]}`;
+};
+
+/**
+ * Capitalizes the first letter of a string.
+ *
+ * @param text - Input string.
+ * @returns Capitalized string.
+ *
+ * @example
+ * capitalizeFirstLetter("hello") // => "Hello"
+ */
+export const capitalizeFirstLetter = (text: string): string => {
+ if (!text) return "";
+ return text.charAt(0).toUpperCase() + text.slice(1);
+};
+
+/* -------------------------------------------------------------------------- */
+/* Clipboard Utilities */
+/* -------------------------------------------------------------------------- */
+
+/**
+ * Copies given text to the user's clipboard.
+ *
+ * @param text - Text to copy.
+ * @returns Promise resolved when the text is copied.
+ *
+ * @example
+ * await copyTextToClipboard("Hello World!")
+ */
+export const copyTextToClipboard = async (text: string): Promise<void> => {
+ if (typeof navigator === "undefined" || !navigator.clipboard) {
+ console.error("Clipboard API not available");
+ return;
+ }
+ try {
+ await navigator.clipboard.writeText(text);
+ } catch (error) {
+ console.error("Failed to copy text:", error);
+ }
+};
+
+/**
+ * Copies a full URL (origin + path) to clipboard.
+ *
+ * @param path - Path to append to the current window origin.
+ * @returns Promise resolved when the URL is copied.
+ *
+ * @example
+ * await copyUrlToClipboard("/profile/123")
+ */
+export const copyUrlToClipboard = async (path: string): Promise<void> => {
+ const origin = typeof window !== "undefined" ? window.location.origin : "";
+ const fullUrl = new URL(path, origin).toString();
+ await copyTextToClipboard(fullUrl);
+};
Greptile
greptile
logic: Missing error handling for invalid URLs and relative paths. URL constructor will throw for malformed URLs
diff block
/>
</div>
- <div className="flex flex-col space-y-3">
- <div className="flex flex-col space-y-2">
- <FileUploadSection
- disabled={isUploadingFile || isCreatingFileFromLink}
- onUpload={(files: File[]) => {
- setIsUploadingFile(true);
- setUploadStartTime(Date.now()); // Record start time
-
- // Add files to uploading files state
-
- // Start the refresh interval to simulate progress
- startRefreshInterval();
-
- // Convert File[] to FileList for addUploadedFileToContext
- const fileListArray = Array.from(files);
- const fileList = new DataTransfer();
- fileListArray.forEach((file) => fileList.items.add(file));
-
- addUploadedFileToContext(fileList.files)
- .then(() => refreshFolders())
- .finally(() => {
- setIsUploadingFile(false);
- });
- }}
- onUrlUpload={async (url: string) => {
- setIsCreatingFileFromLink(true);
- setUploadStartTime(Date.now()); // Record start time
-
- // Add URL to uploading files
- setUploadingFiles((prev) => [
- ...prev,
- { name: url, progress: 0 },
- ]);
-
- // Start the refresh interval to simulate progress
- startRefreshInterval();
-
- try {
- const response: FileResponse[] =
- await createFileFromLink(url, -1);
-
- if (response.length > 0) {
- // Extract domain from URL to help with detection
- const urlObj = new URL(url);
-
- const createdFile: FileResponse = response[0];
- addSelectedFile(createdFile);
- // Make sure to remove the uploading file indicator when done
- markFileComplete(url);
- }
-
- await refreshFolders();
- } catch (e) {
- console.error("Error creating file from link:", e);
- // Also remove the uploading indicator on error
+ {/* Bottom section: fixed height, doesn't flex */}
+ <div className="flex-none py-2">
+ <FileUploadSection
+ disabled={isUploadingFile || isCreatingFileFromLink}
+ onUpload={(files: File[]) => {
+ setIsUploadingFile(true);
+ setUploadStartTime(Date.now()); // Record start time
+
+ // Start the refresh interval to simulate progress
+ startRefreshInterval();
+
+ // Convert File[] to FileList for addUploadedFileToContext
+ const fileListArray = Array.from(files);
+ const fileList = new DataTransfer();
+ fileListArray.forEach((file) => fileList.items.add(file));
+
+ addUploadedFileToContext(fileList.files)
+ .then(() => refreshFolders())
+ .finally(() => {
+ setIsUploadingFile(false);
+ });
+ }}
+ onUrlUpload={async (url: string) => {
+ setIsCreatingFileFromLink(true);
+ setUploadStartTime(Date.now()); // Record start time
+
+ // Add URL to uploading files
+ setUploadingFiles((prev) => [
+ ...prev,
+ { name: url, progress: 0 },
+ ]);
+
+ // Start the refresh interval to simulate progress
+ startRefreshInterval();
+
+ try {
+ const response: FileResponse[] = await createFileFromLink(
+ url,
+ -1
+ );
+
+ if (response.length > 0) {
+ // Extract domain from URL to help with detection
+ const urlObj = new URL(url);
+
+ const createdFile: FileResponse = response[0];
+ addSelectedFile(createdFile);
+ // Make sure to remove the uploading file indicator when done
markFileComplete(url);
- } finally {
- setIsCreatingFileFromLink(false);
}
Greptile
greptile
logic: Missing error handling for empty response array - could lead to undefined behavior
suggested fix
const response: FileResponse[] = await createFileFromLink(
url,
-1
);
if (response.length > 0) {
// Extract domain from URL to help with detection
const urlObj = new URL(url);
const createdFile: FileResponse = response[0];
addSelectedFile(createdFile);
// Make sure to remove the uploading file indicator when done
+ markFileComplete(url);
+ } else {
+ throw new Error('No file was created from the provided link');
+ }
diff block
+import { useState } from "react";
+import { Action, ActionPanel, Form, Icon, openExtensionPreferences, useNavigation } from "@raycast/api";
+import { OriginOption, PLACE_TYPES } from "../utils/types";
+import { useNearbyPlaces } from "../hooks/use-nearby-places";
+import { PlaceSearchResults } from "./place-search-results";
+import { PlaceDetailView } from "./place-detail-view";
+import { getUnitSystem, getDefaultRadius } from "../utils/common";
+
+export function NearbyPlacesSearchForm() {
+ const { push } = useNavigation();
+ const { searchNearbyPlaces, isLoading } = useNearbyPlaces();
+
+ const [placeType, setPlaceType] = useState<string>("restaurant");
+ const [origin, setOrigin] = useState<OriginOption>(OriginOption.Home);
+
+ // Handle origin change with proper type conversion
+ const handleOriginChange = (newValue: string) => {
+ setOrigin(newValue as OriginOption);
+ };
+
+ // Get user's preferred unit system and default radius
+ const unitSystem = getUnitSystem();
+ const [radius, setRadius] = useState<string>(getDefaultRadius());
+
+ const [customAddress, setCustomAddress] = useState<string>("");
+
+ // Handle search submission
+ const handleSubmit = async () => {
+ const places = await searchNearbyPlaces(placeType, origin, customAddress, parseInt(radius, 10));
+
Greptile
greptile
logic: Missing error handling for searchNearbyPlaces. Should wrap in try/catch and show error toast if search fails.
suggested fix
const handleSubmit = async () => {
+ try {
const places = await searchNearbyPlaces(placeType, origin, customAddress, parseInt(radius, 10));
diff block
-import { INextParams } from '@app/interfaces';
import { authenticatedGuard } from '@app/services/server/guards/authenticated-guard-app';
import { editOrganizationProjectsSettingsRequest } from '@app/services/server/requests';
import { NextResponse } from 'next/server';
-export async function PUT(req: Request, { params }: INextParams) {
+export async function PUT(req: Request, { params }: { params: Promise<{ id: string }> }) {
const res = new NextResponse();
-
- if (!params.id) {
+ const id = (await params).id;
+ if (!id) {
return;
}
Greptile
greptile
logic: Missing error handling if params Promise rejects - could cause unhandled promise rejection
diff block
return result;
}
+
+ async delete(key: keyof ConfigVariables): Promise<void> {
+ if (!this.isDatabaseDriverActive) {
+ throw new Error(
+ 'Database configuration is disabled or unavailable, cannot delete configuration',
+ );
+ }
+ await this.databaseConfigDriver.delete(key);
Greptile
greptile
logic: Missing error handling for delete() method. Consider adding try/catch similar to update() method. ```suggestion + try { await this.databaseConfigDriver.delete(key); + this.logger.debug(`Deleted config variable: ${key as string}`); + } catch (error) { + this.logger.error(`Failed to delete config for ${key as string}`, error); + throw error; } ```
diff block
+import { EVER_TEAMS_API_BASE_URL } from "@ever-teams/constants";
+// types
+// services
+import { APIService } from "../api.service";
+
+/**
+ * Service class for handling authentication-related operations
+ * Provides methods for user authentication, password management, and session handling
+ * @extends {APIService}
+ */
+export class AuthService extends APIService {
+ /**
+ * Creates an instance of AuthService
+ * Initializes with the base API URL
+ */
+ constructor(BASE_URL?: string) {
+ super(BASE_URL || EVER_TEAMS_API_BASE_URL);
+ }
+
+ async generateUniqueCode(data: { email: string }): Promise<any> {}
+
+ async signOut(baseUrl: string): Promise<any> {
+ const form = document.createElement("form");
+ form.method = "POST";
+ form.action = `${baseUrl}/auth/logout/`;
+ }
Greptile
greptile
logic: Form is created but never submitted or appended to document. Also missing error handling and response handling
diff block
+import { showToast, Toast } from "@raycast/api";
+import { useState } from "react";
+import { TagForm, useTag } from "./features/tag";
+
+export default function Command() {
+ const [isLoading, setIsLoading] = useState(false);
+ const { createTag } = useTag();
+
+ async function handleSubmit(values: { name: string; color: string }) {
+ const { name, color } = values;
+
+ if (!name) {
+ showToast({
+ style: Toast.Style.Failure,
+ title: "Please enter a tag name",
+ });
+ return;
+ }
+
+ if (name.length > 10) {
+ showToast({
+ style: Toast.Style.Failure,
+ title: "Tag name must be 10 characters or less",
+ });
+ return;
+ }
+
+ if (!/^[a-zA-Z0-9-]+$/.test(name)) {
+ showToast({
+ style: Toast.Style.Failure,
+ title: "Tag name can only contain alphanumeric characters and hyphens",
+ });
+ return;
+ }
+
+ setIsLoading(true);
+ await createTag({ name, color });
+ setIsLoading(false);
Greptile
greptile
logic: Missing error handling for createTag failure. Should use showFailureToast from @raycast/utils for consistency.
suggested fix
+ try {
await createTag({ name, color });
+ } catch (error) {
+ showFailureToast(error, { title: "Failed to create tag" });
}
setIsLoading(false);
diff block
}
}
})
+
+export const useTelegramCallback = createMutationHook({
+ endpoint: TelegramCallbackCommand.TSQ_url,
+ bodySchema: TelegramCallbackCommand.RequestSchema,
+ responseSchema: TelegramCallbackCommand.ResponseSchema,
+ requestMethod: TelegramCallbackCommand.endpointDetails.REQUEST_METHOD,
+ rMutationParams: {
+ onSuccess: (data) => {
+ setToken({ token: data.accessToken })
+ }
+ }
+})
Greptile
greptile
logic: Missing error handling for Telegram callback - other auth methods have error notifications but this one doesn't
diff block
+from cyclomatic_complexity import CodeRiskAnalyzer
+from ASTAnalyzer import ASTLayer
+
+def prioritize_file(changed_files):
+ """
+ Prioritizes the given changed files based on risk and complexity.
+
+ Args:
+ changed_files (list): List of dictionaries containing information about
+ the changed files, including "path" and "diff".
+
+ Returns:
+ list: Sorted list of dictionaries with analysis results for each file,
+ including risk and complexity scores. The files are sorted in
+ descending order based on risk and complexity.
+ """
+ prioritized_files = []
+
+ for file_info in changed_files:
+ file_analysis = CodeRiskAnalyzer(file_info["path"], file_info["diff"]).analyze();
+ file_analysis["diff"] = file_info["diff"]
+ if file_analysis:
+ prioritized_files.append(file_analysis)
Greptile
greptile
logic: Missing error handling if file_analysis is None - could lead to a TypeError (item assignment on NoneType) when adding diff
suggested fix
if file_analysis:
             file_analysis["diff"] = file_info["diff"]
             prioritized_files.append(file_analysis)
diff block
return this.environmentConfigDriver.get(key);
}
+ async set<T extends keyof ConfigVariables>(
+ key: T,
+ value: ConfigVariables[T],
+ ): Promise<void> {
+ if (!this.isDatabaseDriverActive) {
+ throw new Error(
+ 'Database configuration is disabled or unavailable, cannot set configuration',
+ );
+ }
+
+ const metadata =
+ TypedReflect.getMetadata('config-variables', ConfigVariables) ?? {};
+ const envMetadata = metadata[key];
+
+ if (envMetadata?.isEnvOnly) {
+ throw new Error(
+ `Cannot create environment-only variable: ${key as string}`,
+ );
+ }
+
+ await this.databaseConfigDriver.set(key, value);
Greptile
greptile
logic: Missing error handling for set() method. Consider adding try/catch similar to update() method to log failures. ```suggestion + try { await this.databaseConfigDriver.set(key, value); + this.logger.debug(`Set config variable: ${key as string}`); + } catch (error) { + this.logger.error(`Failed to set config for ${key as string}`, error); + throw error; } ```
diff block
+import { LocalStorage } from "@raycast/api";
+import type { Site } from "./types";
+
+const STORAGE_KEY = "sitesXml";
+
+/**
+ * Escape XML special characters in a string.
+ */
+function escapeXML(str: string): string {
+ return str.replace(/[<>&'"]/g, (c) => {
+ switch (c) {
+ case "<":
+ return "&lt;";
+ case ">":
+ return "&gt;";
+ case "&":
+ return "&amp;";
+ case "'":
+ return "&apos;";
+ case `"`:
+ return "&quot;";
+ default:
+ return c;
+ }
+ });
+}
+
+/**
+ * Serialize a list of sites into an XML string.
+ * Omits <category> if the site is "uncategorized".
+ */
+export function sitesToXml(sites: Site[]): string {
+ const body = sites
+ .map((s) => {
+ const lines = [` <site>`, ` <name>${escapeXML(s.name)}</name>`, ` <url>${escapeXML(s.url)}</url>`];
+ if (s.category && s.category !== "uncategorized") {
+ lines.push(` <category>${escapeXML(s.category)}</category>`);
+ }
+ lines.push(` </site>`);
+ return lines.join("\n");
+ })
+ .join("\n");
+
+ return `<?xml version="1.0"?>\n<sites>\n${body}\n</sites>`;
+}
+
+/**
+ * Parse an XML string into an array of Site objects.
+ * Blank or missing <category> yields "uncategorized".
+ */
+export function parseSitesXml(xml: string): Site[] {
+ const siteBlocks = Array.from(xml.matchAll(/<site>([\s\S]*?)<\/site>/g));
+ return siteBlocks.map((m) => {
+ const block = m[1];
+ const nameMatch = block.match(/<name>(.*?)<\/name>/s);
+ const urlMatch = block.match(/<url>(.*?)<\/url>/s);
+ const catMatch = block.match(/<category>(.*?)<\/category>/s);
+ const name = nameMatch?.[1].trim() || "";
+ const url = urlMatch?.[1].trim() || "";
+ const categoryRaw = catMatch?.[1].trim() || "";
+ const category = categoryRaw || "uncategorized";
+ return { name, url, category };
+ });
+}
+
+/**
+ * Return a new array of sites sorted alphabetically by name.
+ */
+export function sortSites(sites: Site[]): Site[] {
+ return sites.slice().sort((a, b) => a.name.localeCompare(b.name));
+}
+
+/**
+ * Extract a sorted list of unique, non-empty categories from the sites.
+ * Excludes "uncategorized".
+ */
+export function getCategories(sites: Site[]): string[] {
+ return Array.from(new Set(sites.map((s) => s.category).filter((c) => c && c !== "uncategorized"))).sort();
+}
+
+/**
+ * Load sites from LocalStorage, initializing to empty XML if needed.
+ */
+export async function loadSites(): Promise<Site[]> {
+ // getItem<T>() returns Promise<T | null | undefined>, so coerce undefined → null
+ const raw = (await LocalStorage.getItem<string>(STORAGE_KEY)) ?? null;
+
+ if (!raw || !raw.trim().startsWith("<?xml")) {
+ const empty = `<?xml version="1.0"?><sites></sites>`;
+ await LocalStorage.setItem(STORAGE_KEY, empty).catch(() => {});
+ return [];
+ }
+
+ try {
+ const parsed = parseSitesXml(raw);
+ return sortSites(parsed);
+ } catch (e) {
+ console.error("loadSites: parse error", e);
+ const empty = `<?xml version="1.0"?><sites></sites>`;
+ await LocalStorage.setItem(STORAGE_KEY, empty).catch(() => {});
+ return [];
+ }
+}
+
+/**
+ * Save a list of sites by serializing to XML and storing it.
+ */
+export async function saveSites(sites: Site[]): Promise<void> {
+ const sorted = sortSites(sites);
+ const xml = sitesToXml(sorted);
+ await LocalStorage.setItem(STORAGE_KEY, xml);
Greptile
greptile
logic: Missing error handling for LocalStorage.setItem. Should wrap in try-catch with showFailureToast.
suggested fix
try {
await LocalStorage.setItem(STORAGE_KEY, xml);
+ } catch (error) {
+ showFailureToast(error, { title: "Failed to save sites" });
}
diff block
gov.msgsToPublish = nil
}
- for _, ce := range gov.chains {
+ // Iterate deterministically by accessing keys from this slice instead of the chainEntry map directly
+ for _, chainId := range gov.chainIds {
+ ce, ok := gov.chains[chainId]
+ if !ok {
+ gov.logger.Error("chainId not found in gov.chains", zap.Stringer("chainId", chainId))
+
+ }
Greptile
greptile
logic: Missing error handling after chainId not found error. This could lead to a nil pointer dereference.
suggested fix
if !ok {
gov.logger.Error("chainId not found in gov.chains", zap.Stringer("chainId", chainId))
+ continue
}
diff block
+import { useEffect, useState } from "react";
+import { ActionPanel, Action, Grid } from "@raycast/api";
+import path from "path";
+import fs from "fs";
+
+interface AzureIcon {
+ name: string;
+ iconPath: string;
+ folder: string;
+}
+
+export default function Command() {
+ const [columns, setColumns] = useState(5);
+ const [isLoading, setIsLoading] = useState(true);
+ const [icons, setIcons] = useState<AzureIcon[]>([]);
+
+ function formatServiceName(filename: string): string {
+ const regex = /^\d+-icon-service-(.+)\.svg$/i;
+ const match = filename.match(regex);
+ if (!match) {
+ return filename
+ .replace(/\.[^.]+$/, "")
+ .split("-")
+ .join(" ");
+ }
+ return match[1].split("-").join(" ");
+ }
+
+ useEffect(() => {
+ const fetchIcons = async () => {
+ const _icons: AzureIcon[] = [];
+ const dir = await fs.promises.opendir(path.resolve(__dirname, "assets/icons"));
+ for await (const dirent of dir) {
+ const svgs = await fs.promises.opendir(path.resolve(__dirname, `assets/icons/${dirent.name}`));
+ for await (const svg of svgs) {
Greptile
greptile
style: Missing error handling for file system operations that could fail ```suggestion + try { const dir = await fs.promises.opendir(path.resolve(__dirname, "assets/icons")); for await (const dirent of dir) { const svgs = await fs.promises.opendir(path.resolve(__dirname, `assets/icons/${dirent.name}`)); for await (const svg of svgs) { ```
diff block
+import { withCache } from "@raycast/utils";
+import { callOpenAIChannel, callOpenAIThread } from "./openaiApi";
+import { getChannelIdByName, getAllUsers, fetchFullThread, getThreadsForChannel } from "./slackApi";
+
+let userMap = {};
+
+// ──────────────────────────────────────────────────────────────
+// Shared helpers
+
+// Turn a thread URL | ts → {channelId, threadTs}
+function parseThread(input) {
+ // URL pattern: https://…/archives/C12345/p1714445821123456
+ if (/^https?:\/\//i.test(input)) {
+ const m = input.match(/archives\/([A-Z0-9]+)\/p(\d{16})/);
+ if (!m) throw new Error("Unrecognised Slack thread URL");
+ return { channelId: m[1], threadTs: `${Number(m[2]) / 1000000}` };
+ }
+ // raw ts => we still need channelId
+ throw new Error("Unrecognised Slack thread URL");
+}
+
+async function loadAllUsers() {
+ userMap = await withCache(getAllUsers, {
+ maxAge: 60 * 60 * 1000 * 24 * 14, // 14 days in ms
+ })();
+ return userMap;
+}
+
+function getUserName(uid) {
+ if (!uid) return "system";
+ const u = userMap[uid];
+ return u?.profile?.display_name?.trim() || u?.profile?.real_name?.trim() || uid;
+}
+
+function replaceUserMentions(text = "") {
+ return text.replace(/<@(\w+)>/g, (_, id) => `@${getUserName(id)}`);
+}
+
+function buildPromptBody(messages, itemIdx = 1) {
+ return [
+ `Thread ${itemIdx}:`,
+ ...messages.map((m) => {
+ const name = getUserName(m.user);
+ const txt = replaceUserMentions(m.text?.replace(/\n/g, " ") ?? "");
+ return `- @${name}: ${txt}`;
+ }),
+ "",
+ ].join("\n");
+}
+
+// ──────────────────────────────────────────────────────────────
+
+// Public API
+export async function summarizeChannel(channelName, days = 7, customPrompt) {
+ await loadAllUsers();
+
+ const channelId = await getChannelIdByName(channelName);
+
+ const oldestTs = Math.floor(Date.now() / 1000) - days * 24 * 60 * 60;
+ const result = await getThreadsForChannel(channelId, oldestTs);
+ if (!result) throw new Error("No threads found in the channel in the specified duration.");
+
+ const promptBody = result.bundles.map((b, i) => buildPromptBody(b.messages, i + 1)).join("\n");
+
+ return callOpenAIChannel(promptBody, channelName, customPrompt);
+}
+
+export async function summarizeThread(rawInput, customPrompt) {
+ await loadAllUsers();
+
+ const { channelId, threadTs } = parseThread(rawInput);
+ const messages = await fetchFullThread(channelId, threadTs);
+ const promptBody = buildPromptBody(messages);
Greptile
greptile
logic: Missing error handling if fetchFullThread fails. Should wrap in try/catch and use showFailureToast
suggested fix
+ try {
const { channelId, threadTs } = parseThread(rawInput);
const messages = await fetchFullThread(channelId, threadTs);
const promptBody = buildPromptBody(messages);
+ } catch (error) {
+ showFailureToast(error, { title: "Failed to fetch thread messages" });
+ throw error;
}
diff block
+import { withCache } from "@raycast/utils";
+import { callOpenAIChannel, callOpenAIThread } from "./openaiApi";
+import { getChannelIdByName, getAllUsers, fetchFullThread, getThreadsForChannel } from "./slackApi";
+
+let userMap = {};
+
+// ──────────────────────────────────────────────────────────────
+// Shared helpers
+
+// Turn a thread URL | ts → {channelId, threadTs}
+function parseThread(input) {
+ // URL pattern: https://…/archives/C12345/p1714445821123456
+ if (/^https?:\/\//i.test(input)) {
+ const m = input.match(/archives\/([A-Z0-9]+)\/p(\d{16})/);
+ if (!m) throw new Error("Unrecognised Slack thread URL");
+ return { channelId: m[1], threadTs: `${Number(m[2]) / 1000000}` };
+ }
+ // raw ts => we still need channelId
+ throw new Error("Unrecognised Slack thread URL");
+}
+
+async function loadAllUsers() {
+ userMap = await withCache(getAllUsers, {
+ maxAge: 60 * 60 * 1000 * 24 * 14, // 14 days in ms
+ })();
+ return userMap;
+}
+
+function getUserName(uid) {
+ if (!uid) return "system";
+ const u = userMap[uid];
+ return u?.profile?.display_name?.trim() || u?.profile?.real_name?.trim() || uid;
+}
+
+function replaceUserMentions(text = "") {
+ return text.replace(/<@(\w+)>/g, (_, id) => `@${getUserName(id)}`);
+}
+
+function buildPromptBody(messages, itemIdx = 1) {
+ return [
+ `Thread ${itemIdx}:`,
+ ...messages.map((m) => {
+ const name = getUserName(m.user);
+ const txt = replaceUserMentions(m.text?.replace(/\n/g, " ") ?? "");
+ return `- @${name}: ${txt}`;
+ }),
+ "",
+ ].join("\n");
+}
+
+// ──────────────────────────────────────────────────────────────
+
+// Public API
+export async function summarizeChannel(channelName, days = 7, customPrompt) {
+ await loadAllUsers();
+
+ const channelId = await getChannelIdByName(channelName);
+
+ const oldestTs = Math.floor(Date.now() / 1000) - days * 24 * 60 * 60;
+ const result = await getThreadsForChannel(channelId, oldestTs);
+ if (!result) throw new Error("No threads found in the channel in the specified duration.");
Greptile
greptile
logic: Missing error handling if getChannelIdByName fails. Should wrap in try/catch and use showFailureToast
suggested fix
+ try {
const channelId = await getChannelIdByName(channelName);
const oldestTs = Math.floor(Date.now() / 1000) - days * 24 * 60 * 60;
const result = await getThreadsForChannel(channelId, oldestTs);
if (!result) throw new Error("No threads found in the channel in the specified duration.");
+ } catch (error) {
+ showFailureToast(error, { title: "Failed to fetch channel threads" });
+ throw error;
}
diff block
if (!p.uId) return cb && cb({ error: 'not logged in' });
postModel.fetchPostById(p.pId, function (r) {
if (!r) return cb && cb({ error: 'missing track' });
- const lastFmSessionKey = (mongodb.usernames[p.uId].lastFm || {}).sk;
- lastFm.scrobble2(
- r && r.name,
- lastFmSessionKey,
- p.uId == r.uId,
- p.timestamp,
- function (res) {
- //console.log("-> last fm response", res);
- cb && cb(res);
- },
- );
+ userModel.fetchAndProcessUserById(p.uId).then((user) => {
+ // @ts-ignore
+ const lastFmSessionKey = user && user.lastFm ? user.lastFm.sk : null;
+ lastFm.scrobble2(
+ r && r.name,
+ lastFmSessionKey,
+ p.uId == r.uId,
+ p.timestamp,
+ function (res) {
+ //console.log("-> last fm response", res);
+ cb && cb(res);
+ },
+ );
+ });
});
Greptile
greptile
logic: Missing error handling for the Promise chain. Add catch block to handle potential user fetch failures.
suggested fix
userModel.fetchAndProcessUserById(p.uId).then((user) => {
// @ts-ignore
const lastFmSessionKey = user && user.lastFm ? user.lastFm.sk : null;
lastFm.scrobble2(
r && r.name,
lastFmSessionKey,
p.uId == r.uId,
p.timestamp,
function (res) {
//console.log("-> last fm response", res);
cb && cb(res);
},
);
+ })
+ .catch(err => {
+ console.error('Failed to fetch user for lastFm scrobble:', err);
+ cb && cb({ error: 'Failed to process user data' });
});
});
diff block
+#include <stdio.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <unistd.h>
+#include <sys/time.h>
+#include <time.h>
+#include <string.h> // for strtok
+#include <getopt.h>
+#include <limits.h>
+#include <stdbool.h>
+#include <nvml.h>
+#include "gmt-lib.h"
+
+
+// All variables are made static, because we believe that this will
+// keep them local in scope to the file and not make them persist in state
+// between Threads.
+// in any case, none of these variables should change between threads
+static unsigned int msleep_time=1000;
+static struct timespec offset;
+
+static void output_stats() {
+ struct timeval now;
+ nvmlReturn_t result;
+ unsigned int device_count;
+ nvmlDevice_t device;
+ char name[NVML_DEVICE_NAME_BUFFER_SIZE];
+// nvmlUtilization_t utilization;
+// nvmlMemory_t memory;
+ unsigned int power_usage;
+// unsigned int power_limit;
+
+ result = nvmlInit();
+ if (result != NVML_SUCCESS) {
+ fprintf(stderr, "Failed to initialize NVML: %s\n", nvmlErrorString(result));
+ exit(1);
+ }
+
+ result = nvmlDeviceGetCount(&device_count);
+ if (result != NVML_SUCCESS) {
+ fprintf(stderr, "Failed to get device count: %s\n", nvmlErrorString(result));
+ nvmlShutdown();
+ exit(1);
+ }
+
+ while (1) {
+ get_adjusted_time(&now, &offset);
+
+ for (unsigned int i = 0; i < device_count; i++) {
+
+ nvmlDeviceGetHandleByIndex(i, &device);
+ nvmlDeviceGetName(device, name, sizeof(name));
Greptile
greptile
logic: Missing error handling for nvmlDeviceGetHandleByIndex and nvmlDeviceGetName calls
suggested fix
+ result = nvmlDeviceGetHandleByIndex(i, &device);
if (result != NVML_SUCCESS) {
+ fprintf(stderr, "Failed to get device handle: %s\n", nvmlErrorString(result));
+ continue;
}
+ result = nvmlDeviceGetName(device, name, sizeof(name));
if (result != NVML_SUCCESS) {
+ fprintf(stderr, "Failed to get device name: %s\n", nvmlErrorString(result));
+ continue;
}
diff block
if request.method in SAFE_METHODS:
return True
else:
- return can_user_edit_feature_flag(request, feature_flag)
+ return (
+ # Old access control
+ can_user_edit_feature_flag(request, feature_flag)
+ or
+ # New access control
+ (
+ self.get_user_access_level(feature_flag) == "editor"
Greptile
greptile
logic: Missing error handling if `get_user_access_level` fails. Should catch potential exceptions to avoid breaking access control.
diff block
+import { useEffect } from "react";
+import { Controller, useForm } from "react-hook-form";
+import { BsSlack } from "react-icons/bs";
+import { zodResolver } from "@hookform/resolvers/zod";
+import { z } from "zod";
+
+import { createNotification } from "@app/components/notifications";
+import {
+ Accordion,
+ AccordionContent,
+ AccordionItem,
+ AccordionTrigger,
+ Button,
+ FormControl,
+ Input
+} from "@app/components/v2";
+import { useToggle } from "@app/hooks";
+import { useUpdateServerConfig } from "@app/hooks/api";
+import { AdminIntegrationsConfig } from "@app/hooks/api/admin/types";
+
+const getCustomSlackAppCreationUrl = () =>
+ `https://api.slack.com/apps?new_app=1&manifest_json=${encodeURIComponent(
+ JSON.stringify({
+ display_information: {
+ name: "Infisical",
+ description: "Get real-time Infisical updates in Slack",
+ background_color: "#c2d62b",
+ long_description: `This Slack application is designed specifically for use with your self-hosted Infisical instance, allowing seamless integration between your Infisical projects and your Slack workspace. With this integration, your team can stay up-to-date with the latest events, changes, and notifications directly inside Slack.
+ - Notifications: Receive real-time updates and alerts about critical events in your Infisical projects. Whether it's a new project being created, updates to secrets, or changes to your team's configuration, you will be promptly notified within the designated Slack channels of your choice.
+ - Customization: Tailor the notifications to your team's specific needs by configuring which types of events trigger alerts and in which channels they are sent.
+ - Collaboration: Keep your entire team in the loop with notifications that help facilitate more efficient collaboration by ensuring that everyone is aware of important developments in your Infisical projects.
+
+ By integrating Infisical with Slack, you can enhance your workflow by combining the power of secure secrets management with the communication capabilities of Slack.`
+ },
+ features: {
+ app_home: {
+ home_tab_enabled: false,
+ messages_tab_enabled: false,
+ messages_tab_read_only_enabled: true
+ },
+ bot_user: {
+ display_name: "Infisical",
+ always_online: true
+ }
+ },
+ oauth_config: {
+ redirect_urls: [`${window.origin}/api/v1/workflow-integrations/slack/oauth_redirect`],
+ scopes: {
+ bot: ["chat:write.public", "chat:write", "channels:read", "groups:read"]
+ }
+ },
+ settings: {
+ org_deploy_enabled: false,
+ socket_mode_enabled: false,
+ token_rotation_enabled: false
+ }
+ })
+ )}`;
+
+const slackFormSchema = z.object({
+ clientId: z.string(),
+ clientSecret: z.string()
+});
+
+type TSlackForm = z.infer<typeof slackFormSchema>;
+
+type Props = {
+ adminIntegrationsConfig?: AdminIntegrationsConfig;
+};
+
+export const SlackIntegrationForm = ({ adminIntegrationsConfig }: Props) => {
+ const { mutateAsync: updateAdminServerConfig } = useUpdateServerConfig();
+ const [isSlackClientIdFocused, setIsSlackClientIdFocused] = useToggle();
+ const [isSlackClientSecretFocused, setIsSlackClientSecretFocused] = useToggle();
+
+ const {
+ control,
+ handleSubmit,
+ setValue,
+ formState: { isSubmitting, isDirty }
+ } = useForm<TSlackForm>({
+ resolver: zodResolver(slackFormSchema)
+ });
+
+ const onSubmit = async (data: TSlackForm) => {
+ await updateAdminServerConfig({
+ slackClientId: data.clientId,
+ slackClientSecret: data.clientSecret
+ });
+
+ createNotification({
+ text: "Updated admin slack configuration",
+ type: "success"
+ });
+ };
Greptile
greptile
logic: Missing error handling for updateAdminServerConfig. Failed updates should show error notification and not leave user thinking it succeeded.
suggested fix
const onSubmit = async (data: TSlackForm) => {
+ try {
await updateAdminServerConfig({
slackClientId: data.clientId,
slackClientSecret: data.clientSecret
});
createNotification({
text: "Updated admin slack configuration",
type: "success"
});
+ } catch (error) {
createNotification({
+ text: "Failed to update slack configuration",
+ type: "error"
});
}
};
diff block
}
})
-export const useBulkDeleteUsersByStatus = createPostMutationHook({
+export const useBulkDeleteUsersByStatus = createMutationHook({
endpoint: BulkDeleteUsersByStatusCommand.TSQ_url,
bodySchema: BulkDeleteUsersByStatusCommand.RequestSchema,
- responseSchema: BulkDeleteUsersByStatusCommand.ResponseSchema
+ responseSchema: BulkDeleteUsersByStatusCommand.ResponseSchema,
+ requestMethod: BulkDeleteUsersByStatusCommand.endpointDetails.REQUEST_METHOD
})
Greptile
greptile
logic: Missing error handling notifications for bulk delete by status operation ```suggestion export const useBulkDeleteUsersByStatus = createMutationHook({ + endpoint: BulkDeleteUsersByStatusCommand.TSQ_url, + bodySchema: BulkDeleteUsersByStatusCommand.RequestSchema, responseSchema: BulkDeleteUsersByStatusCommand.ResponseSchema, + requestMethod: BulkDeleteUsersByStatusCommand.endpointDetails.REQUEST_METHOD, + rMutationParams: { + onSuccess: () => { + notifications.show({ + title: 'Success', + message: 'Task added to queue successfully', + color: 'teal' + }) + }, + onError: (error) => { + notifications.show({ + title: `Bulk Delete Users By Status`, + message: + error instanceof Error ? error.message : `Request failed with unknown error.`, + color: 'red' + }) + } + } +}) ```
diff block
+# 🚀 Smithery Deployment Guide
+
+## Overview
+
+This guide explains how to deploy the Greptile MCP server on Smithery for streamable HTTP hosting.
+
+## Current Status
+
+The Greptile MCP server currently supports:
+- ✅ SSE transport
+- ✅ STDIO transport
+- ❌ Streamable HTTP (needed for optimal Smithery deployment)
+
+## Deployment Steps
+
+### 1. Add HTTP Transport Support
+
+To deploy on Smithery, we need to implement streamable HTTP transport:
+
+```python
+# Add to src/main.py
+from mcp.server.http import create_http_handler
+
+async def run_http():
+ """Run the MCP server with HTTP transport for Smithery"""
+ handler = create_http_handler(mcp)
+
+ # Smithery passes config as base64 query param
+ @handler.route("/mcp")
+ async def mcp_endpoint(request):
+ config = request.query_params.get("config")
+ if config:
+ # Decode base64 config
+ import base64
+ config_json = base64.b64decode(config).decode('utf-8')
+ # Apply configuration
+
Greptile
greptile
logic: Missing error handling for invalid base64 config data. Could crash server if malformed.
suggested fix
config = request.query_params.get("config")
if config:
# Decode base64 config
+ try:
import base64
config_json = base64.b64decode(config).decode('utf-8')
# Apply configuration
+ except Exception as e:
+ return Response(status_code=400, content={"error": "Invalid config encoding"})
diff block
+#!/bin/bash
+
+echo "Starting Supabase..."
+cd supabase
+docker compose up -d
+
+echo "Waiting for Supabase to be healthy..."
+until curl -s http://localhost:54321/rest/v1/ > /dev/null; do
+ echo "Waiting for Supabase..."
+ sleep 5
+done
+
+echo "Supabase is ready! Starting main services..."
+cd ..
Greptile
greptile
logic: Missing error handling for cd command back to parent directory
suggested fix
+cd .. || { echo "Error: failed to return to parent directory"; exit 1; }
diff block
+#!/bin/bash
+
+echo "Starting Supabase..."
+cd supabase
Greptile
greptile
logic: Missing error handling for cd command - script should exit if directory change fails
suggested fix
+cd supabase || { echo "Error: supabase directory not found"; exit 1; }
diff block
+import sqlite3
+
+conn = sqlite3.connect('database.db')
+cursor = conn.cursor()
+
+# Hardcoded credentials
+USERNAME = "admin"
+PASSWORD = "password123"
+
+def login(user, pw):
+ query = f"SELECT * FROM users WHERE username = '{user}' AND password = '{pw}'" # SQL Injection risk!
+ result = cursor.execute(query).fetchall()
+ return result
Greptile
greptile
logic: Missing error handling for database operations. Queries could fail silently
suggested fix
+ try:
result = cursor.execute(query).fetchall()
return result
+ except sqlite3.Error as e:
+ raise Exception(f"Database error: {e}")
diff block
+import sys
+import pyautogui
+from PyQt5.QtWidgets import QApplication, QLabel, QMainWindow, QVBoxLayout, QWidget, QSizePolicy
+from PyQt5.QtCore import Qt
+from screeninfo import get_monitors
+
+message = sys.argv[1]
Greptile
greptile
logic: Missing error handling for sys.argv[1]. Script will crash if no argument provided
suggested fix
+try:
message = sys.argv[1]
+except IndexError:
+ print("Error: Please provide a message as an argument")
+ sys.exit(1)
diff block
});
});
+program
+ .command('lint')
+ .description('Lint your Mastra project')
+ .option('-d, --dir <path>', 'Path to your Mastra folder')
+ .option('-r, --root <path>', 'Path to your root folder')
+ .option('-t, --tools <toolsDirs>', 'Comma-separated list of paths to tool files to include')
+ .action(async args => {
+ await lint({ dir: args.dir, root: args.root, tools: args.tools ? args.tools.split(',') : [] });
+ });
Greptile
greptile
logic: Missing error handling and analytics tracking for the lint command. Should wrap in try/catch and use trackCommandExecution like other commands.
suggested fix
.action(async args => {
+ await analytics.trackCommandExecution({
+ command: 'lint',
+ args,
+ execution: async () => {
await lint({ dir: args.dir, root: args.root, tools: args.tools ? args.tools.split(',') : [] });
+ },
+ origin,
});
});
diff block
+import { QueuePayload } from "./types";
+import { MessageProducer } from "./types";
+
+export class DualWriteProducer implements MessageProducer {
+ private primary: MessageProducer;
+ private secondary: MessageProducer;
+
+ constructor(
+ primaryProducer: MessageProducer,
+ secondaryProducer: MessageProducer
+ ) {
+ this.primary = primaryProducer;
+ this.secondary = secondaryProducer;
+ }
+
+ async sendMessages(queuePayload: QueuePayload) {
+ // Send to primary and log any errors but don't fail
+ try {
+ console.log("Sending to primary queue");
+ await this.primary.sendMessages(queuePayload);
+ } catch (error: any) {
+ console.error(`Error sending to primary queue: ${error.message}`);
+ }
+ // Always return the result from the secondary
+ console.log("Sending to secondary queue");
+ return this.secondary.sendMessages(queuePayload);
Greptile
greptile
logic: missing error handling for secondary producer failures - this could silently fail without any logging or monitoring ```suggestion try { console.log("Sending to secondary queue"); + return await this.secondary.sendMessages(queuePayload); } catch (error: any) { + console.error(`Error sending to secondary queue: ${error.message}`); + throw error; // Re-throw since this is our main return path } ```
diff block
import { ITaskStatusCreate, ITaskStatusItem } from '../../interfaces/ITaskStatus';
import { serverFetch } from '../fetch';
-export function createStatusRequest({
- datas,
- bearer_token,
- tenantId
-}: {
- datas: ITaskStatusCreate;
- bearer_token: string;
- tenantId: string;
-}) {
- return serverFetch<ITaskStatusItem>({
- path: '/task-statuses',
- method: 'POST',
- body: datas,
- bearer_token,
- tenantId
- });
+/**
+ * Creates a new task status
+ * @param data Task status data to create
+ * @param bearer_token Authentication token
+ * @param tenantId Tenant identifier
+ * @returns API response with created task status
+ */
+export function createStatusRequest(
+ data: ITaskStatusCreate,
+ bearer_token: string,
+ tenantId?: string
+) {
+ return serverFetch<ITaskStatusItem>({
+ path: '/task-statuses',
+ method: 'POST',
+ body: data,
+ bearer_token,
+ tenantId
+ }).then(response => {
+ if (!response.response.ok) {
+ throw new Error(`Failed to create status: ${response.response.status}`);
+ }
+ return response;
+ });
}
+/**
+ * Updates an existing task status
+ * @param params Update parameters including ID, data, auth token and tenant
+ * @returns API response with updated task status
+ */
export function updateTaskStatusRequest({
- id,
- datas,
- bearer_token,
- tenantId
+ id,
+ datas,
+ bearer_token,
+ tenantId
}: {
- id: string | any;
- datas: ITaskStatusCreate;
- bearer_token: string;
- tenantId?: any;
+ id: string;
+ datas: ITaskStatusCreate;
+ bearer_token: string;
+ tenantId?: string;
}) {
- return serverFetch<ITaskStatusItem>({
- path: `/task-statuses/${id}`,
- method: 'PUT',
- body: datas,
- bearer_token,
- tenantId
- });
+ return serverFetch<ITaskStatusItem>({
+ path: `/task-statuses/${id}`,
+ method: 'PUT',
+ body: datas,
+ bearer_token,
+ tenantId
+ });
}
Greptile
greptile
logic: updateTaskStatusRequest is missing the error handling that was added to createStatusRequest
diff block
+import { useState, useEffect } from "react";
+import { Connection, PublicKey, LAMPORTS_PER_SOL } from "@solana/web3.js";
+import { AccountLayout, getMint } from "@solana/spl-token";
+
+interface UseSolanaBalanceReturn {
+ balance: number | null;
+ isLoading: boolean;
+ error: string | null;
+}
+
+export function useSolanaBalance(walletAddress: string): UseSolanaBalanceReturn {
+ const [balance, setBalance] = useState<number | null>(null);
+ const [isLoading, setIsLoading] = useState<boolean>(true);
+ const [error, setError] = useState<string | null>(null);
+
+ useEffect(() => {
+ if (!walletAddress) {
+ setError("Wallet address is required.");
+ setIsLoading(false);
+ setBalance(null);
+ return;
+ }
+
+ async function fetchBalance() {
+ setIsLoading(true);
+ setError(null);
+ try {
+ const connection = new Connection("https://api.mainnet-beta.solana.com");
+ const publicKey = new PublicKey(walletAddress);
+
+ const lamports = await connection.getBalance(publicKey);
+ setBalance(lamports / LAMPORTS_PER_SOL);
+ } catch (err) {
+ console.error(`Failed to fetch balance for ${walletAddress}:`, err);
+ if (err instanceof Error) {
+ setError(err.message);
+ } else {
+ setError("An unknown error occurred while fetching balance.");
+ }
+ setBalance(null);
+ } finally {
+ setIsLoading(false);
+ }
+ }
+
+ fetchBalance();
+ }, [walletAddress]);
+
+ return { balance, isLoading, error };
+}
+
+// Predefined map for common token mints to symbols and names
+// Add more tokens here as needed
+const KNOWN_TOKENS_MAP: Record<string, { symbol: string; name: string; decimals?: number }> = {
+ EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v: { symbol: "USDC", name: "USD Coin", decimals: 6 },
+ J1toso1uCk3RLmjorhTtrVwY9HJ7X8V9yYac6Y7kGCPn: { symbol: "JitoSOL", name: "Jito Staked SOL", decimals: 9 },
+ // Add other known tokens here, e.g., mSOL, BONK, etc.
+};
+
+export interface SplTokenBalance {
+ mintAddress: string;
+ uiAmount: number;
+ symbol: string;
+ name: string;
+ decimals: number;
+}
+
+interface UseSplTokenBalancesReturn {
+ tokenBalances: SplTokenBalance[];
+ isLoading: boolean;
+ error: string | null;
+}
+
+export function useSplTokenBalances(walletAddress: string): UseSplTokenBalancesReturn {
+ const [tokenBalances, setTokenBalances] = useState<SplTokenBalance[]>([]);
+ const [isLoading, setIsLoading] = useState<boolean>(true);
+ const [error, setError] = useState<string | null>(null);
+
+ useEffect(() => {
+ if (!walletAddress) {
+ setError("Wallet address is required for SPL tokens.");
+ setIsLoading(false);
+ setTokenBalances([]);
+ return;
+ }
+
+ async function fetchTokenBalances() {
+ setIsLoading(true);
+ setError(null);
+ const connection = new Connection("https://api.mainnet-beta.solana.com");
+ const ownerPublicKey = new PublicKey(walletAddress);
+
+ try {
+ const tokenAccounts = await connection.getTokenAccountsByOwner(ownerPublicKey, {
+ programId: new PublicKey("TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA"), // SPL Token Program ID
+ });
+
+ const fetchedBalances: SplTokenBalance[] = [];
+
+ for (const { account } of tokenAccounts.value) {
+ const accountInfo = AccountLayout.decode(account.data);
+ const mintAddress = new PublicKey(accountInfo.mint).toBase58();
+
+ const tokenInfo = KNOWN_TOKENS_MAP[mintAddress];
+ let decimals = tokenInfo?.decimals;
+
+ if (decimals === undefined) {
+ try {
+ const mintData = await getMint(connection, new PublicKey(mintAddress));
+ decimals = mintData.decimals;
+ } catch (e) {
+ console.warn(`Could not fetch decimals for mint ${mintAddress}:`, e);
+ continue;
+ }
+ }
Greptile
greptile
logic: Missing error handling for decimals=undefined case. Could cause runtime error when used in Math.pow() on line 117
diff block
logger.info(
`[${requestId}] Successfully created Telegram webhook for webhook ${webhookData.id}.`
)
+
+ // Get webhook info to ensure it's properly set up
+ try {
+ const webhookInfoUrl = `https://api.telegram.org/bot${botToken}/getWebhookInfo`
+ const webhookInfo = await fetch(webhookInfoUrl, {
+ headers: {
+ 'User-Agent': 'TelegramBot/1.0'
+ }
+ });
Greptile
greptile
logic: Missing error handling for non-200 response from getWebhookInfo endpoint ```suggestion try { const webhookInfoUrl = `https://api.telegram.org/bot${botToken}/getWebhookInfo` const webhookInfo = await fetch(webhookInfoUrl, { headers: { 'User-Agent': 'TelegramBot/1.0' } }); + if (!webhookInfo.ok) { + throw new Error(`Failed to get webhook info: ${webhookInfo.status} ${webhookInfo.statusText}`); } ```
diff block
<Form.Dropdown
id="projectId"
title="Project"
- onChange={(projectId) => {
- async function getAllTasksForProject(projectId: string): Promise<void> {
- setIsLoading(true);
-
- const storedTasks: string | undefined = await LocalStorage.getItem(`project[${projectId}]`);
- if (storedTasks) setTasks(JSON.parse(storedTasks));
-
- const { data } = await fetcher(
- `/workspaces/${config.workspaceId}/projects/${projectId}/tasks?page-size=1000`,
- );
-
- setTasks(data || []);
- LocalStorage.setItem(`project[${projectId}]`, JSON.stringify(data));
- setIsLoading(false);
- }
-
- getAllTasksForProject(projectId);
+ onChange={async (projectId) => {
+ setIsLoading(true);
+ const tasksData = await getTasksForProject(projectId);
+ setTasks(tasksData);
+ setIsLoading(false);
}}
Greptile
greptile
logic: missing error handling for getTasksForProject — a failed request could leave the UI stuck in the loading state
suggested fix
onChange={async (projectId) => {
setIsLoading(true);
+ try {
const tasksData = await getTasksForProject(projectId);
setTasks(tasksData);
+ } catch (error) {
+ showToast(Toast.Style.Failure, "Failed to load tasks");
+ } finally {
setIsLoading(false);
+ }
+ }}
diff block
message?: string;
}
-export async function fetchCurrentIP(): Promise<string> {
- // First, get the external IP address using HTTPS
- const ip = await new Promise<string>((resolve, reject) => {
- https
- .get('https://api.ipify.org?format=json', (res) => {
+// Function to fetch data with retry capability
+const fetchWithRetry = (
+ url: string,
+ isHttps = true,
+ maxRetries = 3
+): Promise<string> => {
+ return new Promise((resolve, reject) => {
+ let retries = 0;
+
+ const makeRequest = () => {
+ const httpModule = isHttps ? https : http;
+
+ const req = httpModule.get(url, { timeout: 8000 }, (res) => {
let data = '';
- res.on('data', (chunk) => (data += chunk));
- res.on('end', () => {
- try {
- const result = JSON.parse(data);
- resolve(result.ip);
- } catch (error) {
- console.error('Failed to parse IP fetch response:', error);
- reject(new Error('Failed to fetch IP'));
+
+ // Set a timeout on the response object too
+ res.setTimeout(8000, () => {
+ req.destroy();
+ if (retries < maxRetries) {
+ retries++;
+ console.log(
+ `Response timed out. Retrying ${retries}/${maxRetries}...`
+ );
+ setTimeout(makeRequest, 1500); // Increased wait time between retries
+ } else {
+ reject(new Error('Response timed out after multiple attempts'));
}
});
- })
- .on('error', (error) => {
- console.error('Error fetching current IP:', error);
- reject(new Error('HTTP error occurred while fetching IP'));
- });
- });
- // Then fetch the geolocation data for the IP using HTTP
- return new Promise<string>((resolve, reject) => {
- http
- .get(`http://ip-api.com/json/${ip}`, (res) => {
- let geoData = '';
- res.on('data', (chunk) => (geoData += chunk));
+ res.on('data', (chunk) => (data += chunk));
res.on('end', () => {
- try {
- const result: GeolocationResponse = JSON.parse(geoData);
- if (result.status === 'success') {
- resolve(`${result.query} - ${result.city}, ${result.country}`);
- } else {
- console.error('Failed to fetch geolocation:', result.message);
- reject(
- new Error(result.message || 'Failed to fetch geolocation')
+ // Check if we got a valid response
+ if (
+ res.statusCode &&
+ (res.statusCode < 200 || res.statusCode >= 300)
+ ) {
+ const error = new Error(`HTTP error ${res.statusCode}`);
+ if (retries < maxRetries) {
+ retries++;
+ console.log(
+ `Retry ${retries}/${maxRetries} after HTTP error ${res.statusCode}`
);
+ setTimeout(makeRequest, 1500); // Increased wait time
+ } else {
+ reject(error);
}
+ return;
+ }
+
+ try {
+ resolve(data);
} catch (error) {
- console.error('Failed to parse geolocation data:', error);
- reject(new Error('Failed to parse geolocation data'));
+ console.error('Failed to parse response:', error);
+ reject(new Error('Failed to parse response'));
}
});
- })
- .on('error', (error) => {
- console.error('HTTP error during geolocation fetch:', error);
- reject(new Error('HTTP error occurred while fetching geolocation'));
});
+
+ req.on('error', (error) => {
+ console.error(
+ `Request error (attempt ${retries + 1}/${maxRetries + 1}):`,
+ error
+ );
+ req.destroy(); // Ensure the request is destroyed
+
+ if (retries < maxRetries) {
+ retries++;
+ console.log(`Retrying ${retries}/${maxRetries}...`);
+ setTimeout(makeRequest, 1500); // Increased wait time
+ } else {
+ reject(error);
+ }
+ });
+
+ // Set a timeout for the request
+ req.setTimeout(8000, () => {
+ req.destroy();
+ if (retries < maxRetries) {
+ retries++;
+ console.log(
+ `Request timed out. Retrying ${retries}/${maxRetries}...`
+ );
+ setTimeout(makeRequest, 1500); // Increased wait time
+ } else {
+ reject(new Error('Request timed out after multiple attempts'));
+ }
+ });
+ };
+
+ makeRequest();
});
+};
+
+export async function fetchCurrentIP(): Promise<string> {
+ // First, try to get the external IP address using primary service
+ try {
+ const ipData = await fetchWithRetry(
+ 'https://api.ipify.org?format=json',
+ true
+ );
+ const ipResult = JSON.parse(ipData);
+ const ip = ipResult.ip;
Greptile
greptile
logic: Missing error handling for invalid JSON response from ipify.org
suggested fix
try {
const ipResult = JSON.parse(ipData);
const ip = ipResult.ip;
+ if (!ip) throw new Error('Invalid response format');
+ } catch (error) {
+ throw new Error('Failed to parse IP response');
}
diff block
+import { Action, confirmAlert, showToast, Toast } from "@raycast/api";
+import { MutatePromise } from "@raycast/utils";
+import { Book, removeBookStatus, TransformedUserBook } from "../../api/books";
+
+type DeleteBookProps = {
+ userBookId: number;
+ mutateBook?: MutatePromise<Book, undefined>;
+ mutateUserBooks?: MutatePromise<TransformedUserBook[], undefined>;
+};
+
+export default function DeleteBookAction({ userBookId, mutateBook, mutateUserBooks }: DeleteBookProps) {
+ return (
+ <Action
+ title="Remove"
+ style={Action.Style.Destructive}
+ onAction={async () => {
+ if (
+ await confirmAlert({
+ title: "Are you sure?",
+ message: "This will remove your review, rating and status.",
+ })
+ ) {
+ showToast({
+ style: Toast.Style.Animated,
+ title: "Removing...",
+ });
+ await removeBookStatus(userBookId);
Greptile
greptile
logic: Missing error handling for removeBookStatus call. Should use try/catch with showFailureToast.
suggested fix
+ try {
await removeBookStatus(userBookId);
+ } catch (error) {
+ showFailureToast(error, { title: "Failed to remove book status" });
+ return;
+ }
diff block
+import {
+ HeroSection,
+ HeroH1,
+ HeroH2,
+ Section,
+ Breadcrumbs,
+ BluedotRoute,
+ ErrorSection,
+ ProgressDots,
+ HeroCTAContainer,
+ CTALinkOrButton,
+} from '@bluedot/ui';
+import Head from 'next/head';
+import useAxios from 'axios-hooks';
+import { useRouter } from 'next/router';
+import Script from 'next/script';
+import { ROUTES } from '../../lib/routes';
+import { GetAshbyJobsResponse } from '../../components/join-us/JobsListSection';
+import { GetJobResponse } from '../api/cms/jobs/[slug]';
+import MarkdownExtendedRenderer from '../../components/courses/MarkdownExtendedRenderer';
+
+const JobPostingPage = () => {
+ const { query: { ashbyIdOrCmsSlug } } = useRouter();
+ if (typeof ashbyIdOrCmsSlug !== 'string') {
+ return 'Invalid job Ashby id';
+ }
+
+ // ashbyIds are always uuids
+ if (ashbyIdOrCmsSlug.match(/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/)) {
+ return <AshbyJobPostingPage ashbyId={ashbyIdOrCmsSlug} />;
+ }
+
+ return <CmsJobPostingPage slug={ashbyIdOrCmsSlug} />;
+};
+
+const CmsJobPostingPage = ({ slug }: { slug: string }) => {
+ const [{ data, loading, error }] = useAxios<GetJobResponse>({
+ method: 'get',
+ url: `/api/cms/jobs/${slug}`,
+ });
+
+ const currentRoute: BluedotRoute = {
+ title: data?.job.title || 'Job Posting',
+ url: `${ROUTES.joinUs.url}/${slug}`,
+ parentPages: [...(ROUTES.joinUs.parentPages ?? []), ROUTES.joinUs],
+ };
+
+ return (
+ <div>
+ {loading && <ProgressDots />}
+ {error && <ErrorSection error={error} />}
+ {data?.job && (
+ <>
+ <Head>
+ <title>{`${data.job.title} | BlueDot Impact`}</title>
+ <meta name="description" content={data.job.subtitle} />
+ <script
+ type="application/ld+json"
+ // eslint-disable-next-line react/no-danger
+ dangerouslySetInnerHTML={{
+ __html: JSON.stringify({
+ '@context': 'https://schema.org',
+ '@type': 'JobPosting',
+ title: data.job.title,
+ description: data.job.body,
+ datePosted: data.job.publishedAt ? new Date(data.job.publishedAt * 1000).toISOString() : undefined,
+ hiringOrganization: {
+ '@type': 'Organization',
+ name: 'BlueDot Impact',
+ sameAs: 'https://bluedot.org',
+ logo: 'https://bluedot.org/images/logo/icon-on-blue.svg',
+ },
+ jobLocation: {
+ '@type': 'Place',
+ address: {
+ '@type': 'PostalAddress',
+ addressLocality: 'London',
+ addressCountry: 'United Kingdom',
+ },
+ },
+ identifier: data.job.id,
+ mainEntityOfPage: {
+ '@type': 'WebPage',
+ '@id': `${ROUTES.joinUs.url}/${slug}`,
+ },
+ }),
+ }}
+ />
+ </Head>
+ <HeroSection>
+ <HeroH1>{data.job.title}</HeroH1>
+ {data.job.subtitle && <HeroH2>{data.job.subtitle}</HeroH2>}
+ {data.job.applicationUrl && (
+ <HeroCTAContainer>
+ <CTALinkOrButton url={data.job.applicationUrl}>Apply Now</CTALinkOrButton>
+ </HeroCTAContainer>
+ )}
+ </HeroSection>
+ <Breadcrumbs route={currentRoute} />
+ <Section className="max-w-3xl">
+ <MarkdownExtendedRenderer>
+ {data.job.body}
+ </MarkdownExtendedRenderer>
+ {data.job.applicationUrl && (
+ <div className="my-8">
+ <CTALinkOrButton url={data.job.applicationUrl}>Apply Now</CTALinkOrButton>
+ </div>
+ )}
+ </Section>
+ </>
+ )}
+ </div>
+ );
+};
+
+const AshbyJobPostingPage = ({ ashbyId }: { ashbyId: string }) => {
+ const [{ data: ashbyData, loading, error }] = useAxios<GetAshbyJobsResponse>({
+ method: 'get',
+ url: 'https://api.ashbyhq.com/posting-api/job-board/bluedot',
+ });
Greptile
greptile
logic: API URL should be in environment config, not hardcoded. Also missing error handling for when job is not found.
diff block
+import { getReferenceString, MedplumClient, createReference } from '@medplum/core';
+import {
+ ChargeItem,
+ ClinicalImpression,
+ Coding,
+ Encounter,
+ Patient,
+ PlanDefinition,
+ ServiceRequest,
+ Task,
+} from '@medplum/fhirtypes';
+
+export async function createEncounter(
+ medplum: MedplumClient,
+ start: Date,
+ end: Date,
+ classification: Coding,
+ patient: Patient,
+ planDefinition: PlanDefinition
+): Promise<Encounter> {
+
+ const appointment = await medplum.createResource({
+ resourceType: 'Appointment',
+ status: 'booked',
+ start: start.toISOString(),
+ end: end.toISOString(),
+ participant: [
+ {
+ actor: {
+ reference: getReferenceString(patient),
+ },
+ status: 'accepted',
+ },
+ ],
+ });
+
+ const encounter: Encounter = await medplum.createResource({
+ resourceType: 'Encounter',
+ status: 'planned',
+ statusHistory: [],
+ classHistory: [],
+ class: classification,
+ subject: createReference(patient),
+ appointment: [createReference(appointment)],
+ });
+
+ const clinicalImpressionData: ClinicalImpression = {
+ resourceType: 'ClinicalImpression',
+ status: 'completed',
+ description: 'Initial clinical impression',
+ subject: createReference(patient),
+ encounter: createReference(encounter),
+ date: new Date().toISOString(),
+ };
+
+ await medplum.createResource(clinicalImpressionData);
+
+ await medplum.post(medplum.fhirUrl('PlanDefinition', planDefinition.id as string, '$apply'), {
+ resourceType: 'Parameters',
+ parameter: [
+ { name: 'subject', valueString: getReferenceString(patient) },
+ { name: 'encounter', valueString: getReferenceString(encounter) },
+ ],
+ });
Greptile
greptile
logic: Missing error handling for PlanDefinition.$apply operation. If this fails, the encounter is already created but tasks won't be generated.
diff block
+name: Deploy API Integrations
+
+on:
+ push:
+ branches: [main, master]
+ paths:
+ - 'src/**'
+ - 'function-apps/**'
+ - '.github/workflows/api-integrations-deploy.yml'
+ workflow_dispatch:
+ inputs:
+ deploy_target:
+ description: 'Target to deploy (all, goldsky, defillama, coingecko)'
+ required: true
+ default: 'all'
+ type: choice
+ options:
+ - all
+ - goldsky
+ - defillama
+ - coingecko
+
+jobs:
+ deploy-integrations:
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: '22.x'
+ cache: 'npm'
+
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: '3.10'
+ cache: 'pip'
+
+ - name: Install dependencies
+ run: |
+ npm ci
+ npx @goldsky/cli login
+ pip install -r src/ml-engine/requirements.txt requests
+
+ # Goldsky Deployment
+ - name: Configure Goldsky
+ if: github.event.inputs.deploy_target == 'all' || github.event.inputs.deploy_target == 'goldsky' || github.event.inputs.deploy_target == null
+ run: |
+ mkdir -p ~/.goldsky
+ echo "${{ secrets.GOLDSKY_AUTH_CONFIG }}" > ~/.goldsky/auth.json
+ shell: bash
+
+ - name: Deploy to Goldsky
+ if: github.event.inputs.deploy_target == 'all' || github.event.inputs.deploy_target == 'goldsky' || github.event.inputs.deploy_target == null
+ run: cd src/goldsky && goldsky deploy
Greptile
greptile
style: Missing error handling for the Goldsky deployment. Consider adding a --fail-on-error flag or checking the command's exit code.
diff block
+import { useEffect, useState } from "react";
+import { List } from "@raycast/api";
+import { EpisodeListProps, Episode } from "../types";
+import EpisodeListItem from "./EpisodeListItem";
+import { searchSeries } from "../utils/requests";
+
+export const EpisodeList = ({ media, totalSeasons }: EpisodeListProps) => {
+ const [viewType, setViewType] = useState("Season 1");
+ const [episodes, setEpisodes] = useState<Episode[]>([]);
+
+ useEffect(() => {
+ const fetchEpisodes = async () => {
+ const seasonNumber = parseInt(viewType.split(" ")[1]);
+ const seasonData = await searchSeries(media.imdbID, seasonNumber);
+ if (seasonData && seasonData.Episodes) {
+ setEpisodes(seasonData.Episodes);
+ }
+ };
+
+ fetchEpisodes();
+ }, [viewType, media.imdbID]);
Greptile
greptile
logic: Missing error handling for API call and no loading state management. Add try/catch and isLoading to prevent empty state flicker.
suggested fix
+ const [isLoading, setIsLoading] = useState(true);
useEffect(() => {
const fetchEpisodes = async () => {
+ try {
+ setIsLoading(true);
const seasonNumber = parseInt(viewType.split(" ")[1]);
const seasonData = await searchSeries(media.imdbID, seasonNumber);
+ if (seasonData?.Episodes) {
setEpisodes(seasonData.Episodes);
}
+ } catch (error) {
+ showFailureToast(error, { title: "Failed to fetch episodes" });
+ } finally {
+ setIsLoading(false);
}
};
fetchEpisodes();
}, [viewType, media.imdbID]);
diff block
+import { Action, ActionPanel, Alert, confirmAlert, Icon, Keyboard, showToast, Toast } from "@raycast/api";
+import { RouterOutputs, trpc } from "../utils/trpc.util";
+import { NewSpaceMemberAuthPolicyForm } from "../views/NewSpaceMemberAuthPolicyForm";
+
+export const SpaceMemberAuthPolicyItemActionPanel = (props: {
+ refetch: () => void;
+ spaceId: string;
+ emailPattern: string;
+ me: RouterOutputs["user"]["me"];
+}) => {
+ const { spaceId, refetch, emailPattern, me } = props;
+
+ const space = me.associatedSpaces.find((space) => space.id === spaceId);
+ const deletePolicy = trpc.spaceAuth.deleteMemberAuthPolicy.useMutation();
+
+ const handleDelete = async (emailPattern: string) => {
+ if (space?.myRole !== "OWNER") {
+ showToast({
+ style: Toast.Style.Failure,
+ title: "You are not an owner of this space",
+ });
+ return;
+ }
+
+ const confirmed = await confirmAlert({
+ title: "Delete Member Auth Policy",
+ message: `Are you sure you want to delete the member auth policy for ${emailPattern}?`,
+ primaryAction: {
+ title: "Delete",
+ style: Alert.ActionStyle.Destructive,
+ },
+ });
+ if (!confirmed) {
+ return;
+ }
+
+ deletePolicy.mutate(
+ { spaceId, emailPattern },
+ {
+ onSuccess: () => {
+ refetch();
+ },
+ },
+ );
Greptile
greptile
style: Missing error handling for the delete mutation. Should use showFailureToast from @raycast/utils for consistent error handling.
suggested fix
deletePolicy.mutate(
{ spaceId, emailPattern },
{
onSuccess: () => {
refetch();
},
+ onError: (error) => {
+ showFailureToast(error, { title: "Could not delete member auth policy" });
},
},
);
diff block
+import { useState, useEffect } from "react";
+import { List } from "@raycast/api";
+import { searchTitles } from "./utils/requests";
+import SearchBarAccessory from "./components/SearchBarAccessory";
+import { Media, MediaDetails } from "./types";
+import MediaListItem from "./components/MediaListItem";
+import { sortTitles } from "./utils";
+
+interface SearchMediaProps {
+ arguments: {
+ search: string;
+ };
+}
+
+export default function SearchByTitle(props: SearchMediaProps) {
+ const [searchText, setSearchText] = useState(props.arguments.search || "");
+ const [loading, setLoading] = useState(false);
+ const [titles, setTitles] = useState<Media[]>([]);
+ const [sortOrder] = useState("none");
+ const [viewType, setViewType] = useState("all");
+
+ const onSearch = async (search: string) => {
+ setLoading(true);
+ setSearchText(search);
+ if (!search) {
+ setLoading(false);
+ return;
+ }
+
+ searchTitles(search, viewType).then((titles) => {
+ setTitles(titles);
+ setLoading(false);
+ });
Greptile
greptile
logic: Missing error handling for API call. Use try/catch and showFailureToast from @raycast/utils
suggested fix
+ try {
+ const titles = await searchTitles(search, viewType);
setTitles(titles);
setLoading(false);
+ } catch (error) {
+ showFailureToast(error, { title: "Failed to search titles" });
setLoading(false);
}
diff block
+"use client";
+
+import { Row } from "@tanstack/react-table";
+import { useParams, usePathname, useRouter, useSearchParams } from "next/navigation";
+import { useCallback, useEffect, useState } from "react";
+
+import RefreshButton from "@/components/traces/refresh-button";
+import { columns, filters } from "@/components/traces/sessions-table/columns";
+import { useToast } from "@/lib/hooks/use-toast";
+import { SessionPreview, Trace } from "@/lib/traces/types";
+import { PaginatedResponse } from "@/lib/types";
+
+import { DataTable } from "../../ui/datatable";
+import DataTableFilter, { DataTableFilterList } from "../../ui/datatable-filter";
+import DateRangeFilter from "../../ui/date-range-filter";
+import TextSearchFilter from "../../ui/text-search-filter";
+
+type SessionRow = {
+ type: string;
+ data: SessionPreview | Trace;
+ subRows: SessionRow[];
+};
+
+interface SessionsTableProps {
+ onRowClick?: (rowId: string) => void;
+}
+
+export default function SessionsTable({ onRowClick }: SessionsTableProps) {
+ const { projectId } = useParams();
+ const searchParams = useSearchParams();
+ const pathName = usePathname();
+ const router = useRouter();
+ const { toast } = useToast();
+
+ const [focusedRowId, setFocusedRowId] = useState<string | undefined>(undefined);
+ const [sessions, setSessions] = useState<SessionRow[] | undefined>(undefined);
+
+ const defaultPageNumber = searchParams.get("pageNumber") ?? "0";
+ const defaultPageSize = searchParams.get("pageSize") ?? "50";
+ const [totalCount, setTotalCount] = useState<number>(0);
+ const pageNumber = parseInt(searchParams.get("pageNumber") ?? "0");
+ const pageSize = Math.max(parseInt(defaultPageSize), 1);
+ const pageCount = Math.ceil(totalCount / pageSize);
+ const filter = searchParams.get("filter");
+ const startDate = searchParams.get("startDate");
+ const endDate = searchParams.get("endDate");
+ const pastHours = searchParams.get("pastHours");
+ const textSearchFilter = searchParams.get("search");
+
+ const getSessions = useCallback(async () => {
+ try {
+ setSessions(undefined);
+ let queryFilter = searchParams.getAll("filter");
+
+ if (!pastHours && !startDate && !endDate) {
+ const sp = new URLSearchParams();
+ for (const [key, value] of Object.entries(searchParams)) {
+ if (key !== "pastHours") {
+ sp.set(key, value as string);
+ }
+ }
+ sp.set("pastHours", "24");
+ router.push(`${pathName}?${sp.toString()}`);
+ return;
+ }
+
+ const urlParams = new URLSearchParams();
+ urlParams.set("pageNumber", pageNumber.toString());
+ urlParams.set("pageSize", pageSize.toString());
+
+ queryFilter.forEach((filter) => urlParams.append("filter", filter));
+
+ if (pastHours != null) urlParams.set("pastHours", pastHours);
+ if (startDate != null) urlParams.set("startDate", startDate);
+ if (endDate != null) urlParams.set("endDate", endDate);
+
+ if (typeof textSearchFilter === "string" && textSearchFilter.length > 0) {
+ urlParams.set("search", textSearchFilter);
+ }
+
+ const url = `/api/projects/${projectId}/sessions?${urlParams.toString()}`;
+
+ const res = await fetch(url, {
+ method: "GET",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ });
+
+ if (!res.ok) {
+ throw new Error(`Failed to fetch sessions: ${res.status} ${res.statusText}`);
+ }
+
+ const data = (await res.json()) as PaginatedResponse<SessionPreview>;
+
+ setSessions(
+ data.items.map((s) => ({
+ type: "session",
+ data: s,
+ subRows: [],
+ }))
+ );
+
+ setTotalCount(data.totalCount);
+ } catch (error) {
+ toast({
+ title: "Failed to load sessions. Please try again.",
+ variant: "destructive",
+ });
+ // Set empty sessions to show error state
+ setSessions([]);
+ setTotalCount(0);
+ }
+ }, [
+ endDate,
+ pageNumber,
+ pageSize,
+ pastHours,
+ pathName,
+ projectId,
+ router,
+ searchParams,
+ startDate,
+ textSearchFilter,
+ filter,
+ toast,
+ ]);
+
+ const onPageChange = useCallback(
+ (pageNumber: number, pageSize: number) => {
+ const params = new URLSearchParams(searchParams);
+ params.set("pageNumber", pageNumber.toString());
+ params.set("pageSize", pageSize.toString());
+ router.push(`${pathName}?${params.toString()}`);
+ },
+ [pathName, router, searchParams]
+ );
+
+ const handleRowClick = useCallback(
+ async (row: Row<SessionRow>) => {
+ if (row.original.type === "trace") {
+ const params = new URLSearchParams(searchParams);
+ setFocusedRowId(row.original.data.id);
+ onRowClick?.(row.original.data.id);
+ params.set("selectedId", row.original.data.id);
+ router.push(`${pathName}?${params.toString()}`);
+ return;
+ }
+
+ row.toggleExpanded();
+
+ const filter = {
+ column: "session_id",
+ value: row.original.data.id,
+ operator: "eq",
+ };
+
+ const res = await fetch(
+ `/api/projects/${projectId}/traces?pageNumber=0&pageSize=50&filter=${JSON.stringify(filter)}`
+ );
Greptile
greptile
logic: Missing error handling for trace fetch request. Could leave table in inconsistent state if request fails. ```suggestion try { const res = await fetch( `/api/projects/${projectId}/traces?pageNumber=0&pageSize=50&filter=${JSON.stringify(filter)}` ); if (!res.ok) { + throw new Error(`Failed to fetch traces: ${res.status} ${res.statusText}`); } ```
diff block
+#!/bin/bash
+
+# Ensure ReportGenerator tool is installed
+if ! command -v reportgenerator &> /dev/null; then
+ echo "Installing ReportGenerator tool..."
+ dotnet tool install -g dotnet-reportgenerator-globaltool
+fi
+
+# Set working directory to the project directory
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+cd "$SCRIPT_DIR"
+
+# Run the tests with coverage
+echo "Running tests with coverage collection..."
+dotnet test .
+
+# Generate HTML report
+echo "Generating HTML coverage report..."
+TEST_RESULTS_DIR="$SCRIPT_DIR/TestResults"
+REPORT_DIR="$SCRIPT_DIR/CoverageReport"
+
+# Create the report directory if it doesn't exist
+mkdir -p "$REPORT_DIR"
+
+# Use wildcard pattern for coverage files
+echo "Searching for coverage files..."
+COVERAGE_PATTERN="$TEST_RESULTS_DIR/**/coverage.cobertura.xml"
+
+# Generate the report using the wildcard pattern
+reportgenerator "-reports:$COVERAGE_PATTERN" "-targetdir:$REPORT_DIR" -reporttypes:Html
Greptile
greptile
logic: Missing error handling if reportgenerator fails. The script should check the exit code and handle failures.
suggested fix
reportgenerator "-reports:$COVERAGE_PATTERN" "-targetdir:$REPORT_DIR" -reporttypes:Html
+if [ $? -ne 0 ]; then
+ echo "Error: Failed to generate coverage report"
+ exit 1
fi
diff block
+#include <stdio.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <unistd.h>
+#include <sys/time.h>
+#include <time.h>
+#include <string.h> // for strtok
+#include <getopt.h>
+#include <limits.h>
+#include <stdbool.h>
+#include <nvml.h>
+#include "gmt-lib.h"
+
+
+// All variables are made static, because we believe that this will
+// keep them local in scope to the file and not make them persist in state
+// between Threads.
+// in any case, none of these variables should change between threads
+static unsigned int msleep_time=1000;
+static struct timespec offset;
+
+static void output_stats() {
+ struct timeval now;
+ nvmlReturn_t result;
+ unsigned int device_count;
+ nvmlDevice_t device;
+ char name[NVML_DEVICE_NAME_BUFFER_SIZE];
+// nvmlUtilization_t utilization;
+// nvmlMemory_t memory;
+ unsigned int power_usage;
+// unsigned int power_limit;
+
+ result = nvmlInit();
+ if (result != NVML_SUCCESS) {
+ fprintf(stderr, "Failed to initialize NVML: %s\n", nvmlErrorString(result));
+ exit(1);
+ }
+
+ result = nvmlDeviceGetCount(&device_count);
+ if (result != NVML_SUCCESS) {
+ fprintf(stderr, "Failed to get device count: %s\n", nvmlErrorString(result));
+ nvmlShutdown();
+ exit(1);
+ }
+
+ while (1) {
+ get_adjusted_time(&now, &offset);
+
+ for (unsigned int i = 0; i < device_count; i++) {
+
+ nvmlDeviceGetHandleByIndex(i, &device);
+ nvmlDeviceGetName(device, name, sizeof(name));
+// printf("GPU %u: %s\n", i, name);
+
+// nvmlDeviceGetUtilizationRates(device, &utilization);
+// printf(" Utilization: %u%%\n", utilization.gpu);
+
+// nvmlDeviceGetMemoryInfo(device, &memory);
+// printf(" Memory: %llu MiB / %llu MiB\n", memory.used / 1024 / 1024, memory.total / 1024 / 1024);
+
+// nvmlDeviceGetEnforcedPowerLimit(device, &power_limit); // mW
+
+ nvmlDeviceGetPowerUsage(device, &power_usage); // mW
+ printf("%ld%06ld %u \"%s-%u\"\n", now.tv_sec, now.tv_usec, power_usage, name, i);
Greptile
greptile
logic: Missing error handling for nvmlDeviceGetPowerUsage - could cause undefined behavior if call fails
suggested fix
+ result = nvmlDeviceGetPowerUsage(device, &power_usage); // mW
if (result != NVML_SUCCESS) {
+ fprintf(stderr, "Failed to get power usage: %s\n", nvmlErrorString(result));
+ continue;
}
printf("%ld%06ld %u \"%s-%u\"\n", now.tv_sec, now.tv_usec, power_usage, name, i);
diff block
+import { AxiosError } from "axios";
+
+import { request } from "@app/lib/config/request";
+import { BadRequestError } from "@app/lib/errors";
+import { removeTrailingSlash } from "@app/lib/fn";
+import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
+import { AppConnection } from "@app/services/app-connection/app-connection-enums";
+
+import { OnePassConnectionMethod } from "./1password-connection-enums";
+import { TOnePassConnection, TOnePassConnectionConfig, TOnePassVault } from "./1password-connection-types";
+
+export const getOnePassInstanceUrl = async (config: TOnePassConnectionConfig) => {
+ const instanceUrl = removeTrailingSlash(config.credentials.instanceUrl);
+
+ await blockLocalAndPrivateIpAddresses(instanceUrl);
+
+ return instanceUrl;
+};
+
+export const getOnePassConnectionListItem = () => {
+ return {
+ name: "1Password" as const,
+ app: AppConnection.OnePass as const,
+ methods: Object.values(OnePassConnectionMethod) as [OnePassConnectionMethod.ApiToken]
+ };
+};
+
+export const validateOnePassConnectionCredentials = async (config: TOnePassConnectionConfig) => {
+ const instanceUrl = await getOnePassInstanceUrl(config);
+
+ const { apiToken } = config.credentials;
+
+ try {
+ await request.get(`${instanceUrl}/v1/vaults`, {
+ headers: {
+ Authorization: `Bearer ${apiToken}`,
+ Accept: "application/json"
+ }
+ });
+ } catch (error: unknown) {
+ if (error instanceof AxiosError) {
+ throw new BadRequestError({
+ message: `Failed to validate credentials: ${error.message || "Unknown error"}`
+ });
+ }
+ throw new BadRequestError({
+ message: "Unable to validate connection: verify credentials"
+ });
+ }
+
+ return config.credentials;
+};
+
+export const listOnePassVaults = async (appConnection: TOnePassConnection) => {
+ const instanceUrl = await getOnePassInstanceUrl(appConnection);
+ const { apiToken } = appConnection.credentials;
+
+ const resp = await request.get<TOnePassVault[]>(`${instanceUrl}/v1/vaults`, {
+ headers: {
+ Authorization: `Bearer ${apiToken}`,
+ Accept: "application/json"
+ }
+ });
Greptile
greptile
logic: Missing error handling for failed vault listing requests. Should wrap in try/catch and provide specific error messages.
```suggestion
  try {
    const resp = await request.get<TOnePassVault[]>(`${instanceUrl}/v1/vaults`, {
      headers: {
        Authorization: `Bearer ${apiToken}`,
        Accept: "application/json"
      }
    });
    return resp.data;
  } catch (error: unknown) {
    if (error instanceof AxiosError) {
      throw new BadRequestError({
        message: `Failed to list vaults: ${error.message || "Unknown error"}`
      });
    }
    throw new BadRequestError({
      message: "Unable to list vaults: verify credentials"
    });
  }
```
diff block
+using System;
+using System.Threading.Tasks;
+using Azure.Identity;
+using Azure.Security.KeyVault.Secrets;
+using Phoenix.MarketData.Core.Configuration;
+
+namespace Phoenix.MarketData.Infrastructure.Configuration
+{
+ public class MarketDataSecretProvider : IMarketDataSecretProvider
+ {
+ private readonly SecretClient _secretClient;
+ private readonly ISecretCache _secretCache;
+
+ public MarketDataSecretProvider(string keyVaultUrl, ISecretCache? secretCache = null)
+ {
+ if (string.IsNullOrEmpty(keyVaultUrl))
+ throw new ArgumentException("Key Vault URL cannot be null or empty", nameof(keyVaultUrl));
+
+ // Use DefaultAzureCredential for managed identity or local development
+ _secretClient = new SecretClient(new Uri(keyVaultUrl), new DefaultAzureCredential());
+ _secretCache = secretCache ?? new NoOpSecretCache();
+ }
+
+ public async Task<string> GetCosmosConnectionStringAsync()
+ {
+ return await GetSecretAsync("CosmosDbConnectionString");
+ }
+
+ public async Task<string> GetEventGridKeyAsync()
+ {
+ return await GetSecretAsync("EventGridKey");
+ }
+
+ public async Task<string> GetEventGridEndpointAsync()
+ {
+ return await GetSecretAsync("EventGridEndpoint");
+ }
+
+ public async Task<string> GetEventHubConnectionStringAsync()
+ {
+ return await GetSecretAsync("EventHubConnectionString");
+ }
+
+ private async Task<string> GetSecretAsync(string secretName)
+ {
+ // Try to get from cache first
+ var cachedValue = _secretCache.GetSecret(secretName);
+ if (!string.IsNullOrEmpty(cachedValue))
+ {
+ return cachedValue;
+ }
+
+ // Retrieve from Key Vault
+ var secret = await _secretClient.GetSecretAsync(secretName);
+ var value = secret.Value.Value;
Greptile
greptile
logic: Missing error handling for when secret doesn't exist in Key Vault or when Value is null
suggested fix
var secret = await _secretClient.GetSecretAsync(secretName);
+ if (secret?.Value == null)
+ throw new InvalidOperationException($"Secret '{secretName}' not found or has null value");
var value = secret.Value.Value;
diff block
}
});
};
+
+export const useOrderPkiSubscriberCert = () => {
+ return useMutation<{ message: string }, object, TIssuePkiSubscriberCertDTO>({
Greptile
greptile
style: Missing error handling type in useMutation generic params. Should be `Error` or a specific error type instead of `object`
diff block
searchObj.data.datetime.endTime = extractedDate.to;
};
+
+ // Get metadata
+ // Update metadata
+ // Update results
+
+ const handleStreamingHits = (payload: WebSocketSearchPayload, response: WebSocketSearchResponse, isPagination: boolean, appendResult: boolean = false) => {
+ if (
+ searchObj.meta.refreshInterval > 0 &&
+ router.currentRoute.value.name == "logs"
+ ) {
+ searchObj.data.queryResults.hits = response.content.results.hits;
+ }
+
+ if (!searchObj.meta.refreshInterval) {
+ // Scan-size and took time in histogram title
+ // For the initial request, we get histogram and logs data. So, we need to sum the scan_size and took time of both the requests.
+ // For the pagination request, we only get logs data. So, we need to consider scan_size and took time of only logs request.
+ if (appendResult) {
+ searchObj.data.queryResults.hits.push(
+ ...response.content.results.hits,
+ );
+ } else {
+ searchObj.data.queryResults.hits = response.content.results.hits;
+ }
+ }
+
+ // // We are storing time_offset for the context of pagecount, to get the partial pagecount
+ // if (searchObj.data.queryResults) {
+ // searchObj.data.queryResults.time_offset = response.content?.time_offset;
+ // }
+
+ processPostPaginationData();
+ }
Greptile
greptile
logic: handleStreamingHits is missing error handling - should wrap in try/catch and handle potential errors during hit processing
suggested fix
const handleStreamingHits = (payload: WebSocketSearchPayload, response: WebSocketSearchResponse, isPagination: boolean, appendResult: boolean = false) => {
+ try {
if (
searchObj.meta.refreshInterval > 0 &&
router.currentRoute.value.name == "logs"
) {
searchObj.data.queryResults.hits = response.content.results.hits;
}
if (!searchObj.meta.refreshInterval) {
if (appendResult) {
searchObj.data.queryResults.hits.push(
...response.content.results.hits,
);
} else {
searchObj.data.queryResults.hits = response.content.results.hits;
}
}
processPostPaginationData();
+ } catch (error) {
+ console.error('Error handling streaming hits:', error);
+ searchObj.loading = false;
+ showErrorNotification('Error processing search results');
}
}
diff block
msg = "Vespa: Readiness probe did not succeed within the timeout. Exiting..."
logger.error(msg)
raise WorkerShutdown(msg)
+
+
+# File for validating worker liveness
+class LivenessProbe(bootsteps.StartStopStep):
+ requires = {"celery.worker.components:Timer"}
+
+ def __init__(self, worker: Any, **kwargs: Any) -> None:
+ super().__init__(worker, **kwargs)
+ self.requests: list[Any] = []
+ self.tref = None
+ self.path = _make_probe_path("liveness", worker.hostname)
+
+ def start(self, worker: Any) -> None:
+ self.tref = worker.timer.call_repeatedly(
+ 1.0,
+ self.update_liveness_file,
+ (worker,),
+ priority=10,
+ )
+
+ def stop(self, worker: Any) -> None:
+ self.path.unlink(missing_ok=True)
+
+ def update_liveness_file(self, worker: Any) -> None:
+ self.path.touch()
Greptile
greptile
logic: Missing error handling for file operations. Failed touch() could cause false negatives in health checks.
diff block
+// src/app/settings_features_KnowledgeBase/index.tsx
+'use client';
+
+import { Button, List, Popconfirm, Switch, Tag, Typography, message } from 'antd'; // Added List, Popconfirm, Tag
+import { DeleteOutlined, UploadOutlined } from '@ant-design/icons'; // Added icon
+import { useEffect, useState } from 'react';
+import { useTranslation } from 'react-i18next';
+import { Flexbox } from 'react-layout-kit';
+
+import { useGlobalStore } from '@/store/global';
+import {
+ selectIndexedDocuments, // Added selector for indexed documents
+ selectKnowledgeBaseSettings,
+ selectUseLocalKnowledgeBase,
+} from '@/store/global/slices/settings';
+// Assuming a generic layout component exists.
+// Path might need adjustment based on actual project structure.
+// For now, let's assume a simplified path or that it's globally available.
+// import { SettingsLayout } from '@/app/settings/layout';
+// Using a placeholder for SettingsLayout for now.
+const SettingsLayout = ({ children, pageTitle }: { children: React.ReactNode, pageTitle: string }) => (
+ <Flexbox gap={16} style={{ padding: 24 }}>
+ <Typography.Title level={3}>{pageTitle}</Typography.Title>
+ {children}
+ </Flexbox>
+);
+
+
+const { Title, Paragraph } = Typography;
+
+const KnowledgeBaseSettingsPage = () => {
+ const { t } = useTranslation('setting'); // Assuming 'setting' is a relevant namespace
+
+ // Selectors to get current state
+ const knowledgeBaseSettings = useGlobalStore(selectKnowledgeBaseSettings);
+ const useLocalKnowledgeBase = useGlobalStore(selectUseLocalKnowledgeBase);
+
+ // Actions from the store
+ const fetchSettings = useGlobalStore((s) => s.fetchKnowledgeBaseSettingsFromMain);
+ const toggleUseLocal = useGlobalStore((s) => s.toggleUseLocalKnowledgeBase);
+ const fetchDocuments = useGlobalStore((s) => s.fetchIndexedDocuments);
+ const removeDocument = useGlobalStore((s) => s.removeDocumentById);
+
+ // State for UI
+ const [isProcessingFile, setIsProcessingFile] = useState(false);
+ const [isLoadingDocuments, setIsLoadingDocuments] = useState(false);
+ const [documentsError, setDocumentsError] = useState<string | null>(null); // For list loading errors
+ const indexedDocuments = useGlobalStore(selectIndexedDocuments);
+ const [deletingDocId, setDeletingDocId] = useState<string | null>(null);
+
+
+ useEffect(() => {
+ // Fetch initial settings (like useLocalKnowledgeBase)
+ fetchSettings();
+ }, [fetchSettings]);
+
+ useEffect(() => {
+ // Fetch indexed documents if local RAG is enabled
+ if (useLocalKnowledgeBase) {
+ setIsLoadingDocuments(true);
+ setDocumentsError(null);
+ fetchDocuments().then(result => {
+ if (!result.success && result.error) {
+ setDocumentsError(result.error);
+ // No antd.message here as error is displayed in list area
+ }
+ }).finally(() => setIsLoadingDocuments(false));
+ } else {
+ const setDocs = useGlobalStore.getState().setIndexedDocuments;
+ setDocs([]);
+ setDocumentsError(null);
+ }
+ }, [useLocalKnowledgeBase, fetchDocuments]); // Removed t from dependencies as it's stable
+
+
+ const handleToggleLocalKnowledgeBase = (checked: boolean) => {
+ toggleUseLocal(checked);
+ if (!checked) {
+ const setDocs = useGlobalStore.getState().setIndexedDocuments;
+ setDocs([]);
+ setDocumentsError(null);
+ }
+ };
+
+ const handleAddFileToLocalRag = async () => {
+ if (!window.electron || !window.electron.ipcRenderer) {
+ message.error(t('knowledgeBase.localRag.ipcError', 'Electron IPC not available. This feature is only available in the desktop app.'));
+ return;
+ }
+
+ setIsProcessingFile(true);
+ const messageKey = 'processingFileMessage';
+ message.loading({
+ content: t('knowledgeBase.localRag.processingMessage', 'Processing file... This may take a moment.'),
+ key: messageKey,
+ duration: 0,
+ });
+
+ try {
+ // The IPC call itself is now made within the Zustand action if we were to use it.
+ // However, selectAndProcessFileForLocalRag in SystemCtr returns the result directly,
+ // so we can still use invoke here for direct feedback.
+ const result = await window.electron.ipcRenderer.invoke('selectAndProcessFileForLocalRag') as { success: boolean; message: string; documentId?: string; filePath?: string };
+
+ if (result.success) {
+ message.success({
+ content: result.message || t('knowledgeBase.localRag.processSuccess', 'File added successfully.'),
+ key: messageKey,
+ });
+ // The fetchDocuments action will handle its own success/error state for the list.
+ fetchDocuments().then(fetchResult => {
+ if (!fetchResult.success && fetchResult.error) {
+ setDocumentsError(fetchResult.error); // Show error in list area if fetching new list fails
+ }
+ });
Greptile
greptile
logic: Missing error handling for the fetchDocuments promise rejection. Add a .catch() block to handle potential errors.
suggested fix
fetchDocuments().then(fetchResult => {
if (!fetchResult.success && fetchResult.error) {
setDocumentsError(fetchResult.error); // Show error in list area if fetching new list fails
}
+ }).catch(error => {
+ setDocumentsError(error.message || 'Failed to fetch documents');
});
diff block
.groupBy(evaluationScores.resultId)
);
+ // Build all where conditions
+ const whereConditions = [eq(evaluationResults.evaluationId, evaluationId)];
+
+ // Handle search conditions
+ if (search && search.trim() !== "") {
+ // Build search conditions for regular fields
+ const regularSearchConditions: SQL<unknown>[] = [
+ sql`${evaluationResults.data}::text ILIKE ${'%' + search + '%'}`,
+ sql`${evaluationResults.target}::text ILIKE ${'%' + search + '%'}`,
+ sql`${evaluationResults.executorOutput}::text ILIKE ${'%' + search + '%'}`,
+ sql`${subQueryScoreCte.cteScores}::text ILIKE ${'%' + search + '%'}`
+ ];
+
+ // If we found matching traces via span search, include those matches
+ if (searchTraceIds.length > 0) {
+ regularSearchConditions.push(
+ inArray(evaluationResults.traceId, searchTraceIds)
+ );
+ }
+
+ // Build OR condition manually
+ if (regularSearchConditions.length === 1) {
+ whereConditions.push(regularSearchConditions[0]);
+ } else {
+ let orCondition = sql`(${regularSearchConditions[0]}`;
+ for (let i = 1; i < regularSearchConditions.length; i++) {
+ orCondition = sql`${orCondition} OR ${regularSearchConditions[i]}`;
+ }
+ orCondition = sql`${orCondition})`;
+ whereConditions.push(orCondition);
+ }
+ }
+
+ // Duration expression (in seconds)
+ const durationExpr = sql`EXTRACT(EPOCH FROM (${traces.endTime} - ${traces.startTime}))`;
+
+ // Total cost expression
+ const costExpr = sql`(COALESCE(${traces.inputCost}, 0) + COALESCE(${traces.outputCost}, 0))`;
+
+ // Add filter conditions
+ urlParamFilters.forEach(filter => {
+ const column = filter.column;
+ const value = filter.value;
+ const operator = filter.operator;
+
+ // Handle different column types
+ if (column === "index") {
+ whereConditions.push(eq(evaluationResults.index, parseInt(value)));
Greptile
greptile
logic: Missing validation for parseInt() - it does not throw, but returns NaN for non-numeric input, which would produce an always-false equality filter
suggested fix
+ const parsedIndex = parseInt(value);
+ if (!isNaN(parsedIndex)) {
+ whereConditions.push(eq(evaluationResults.index, parsedIndex));
}
diff block
+#include <stdio.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <unistd.h>
+#include <sys/time.h>
+#include <time.h>
+#include <string.h> // for strtok
+#include <getopt.h>
+#include <limits.h>
+#include <stdbool.h>
+#include <nvml.h>
+#include "gmt-lib.h"
+
+
+// All variables are made static, because we believe that this will
+// keep them local in scope to the file and not make them persist in state
+// between Threads.
+// in any case, none of these variables should change between threads
+static unsigned int msleep_time=1000;
+static struct timespec offset;
+
+static void output_stats() {
+ struct timeval now;
+ nvmlReturn_t result;
+ unsigned int device_count;
+ nvmlDevice_t device;
+ char name[NVML_DEVICE_NAME_BUFFER_SIZE];
+// nvmlUtilization_t utilization;
+// nvmlMemory_t memory;
+ unsigned int power_usage;
+// unsigned int power_limit;
+
+ result = nvmlInit();
+ if (result != NVML_SUCCESS) {
+ fprintf(stderr, "Failed to initialize NVML: %s\n", nvmlErrorString(result));
+ exit(1);
+ }
+
+ result = nvmlDeviceGetCount(&device_count);
+ if (result != NVML_SUCCESS) {
+ fprintf(stderr, "Failed to get device count: %s\n", nvmlErrorString(result));
+ nvmlShutdown();
+ exit(1);
+ }
+
+ while (1) {
+ get_adjusted_time(&now, &offset);
+
+ for (unsigned int i = 0; i < device_count; i++) {
+
+ nvmlDeviceGetHandleByIndex(i, &device);
+ nvmlDeviceGetName(device, name, sizeof(name));
Greptile
greptile
logic: Missing error handling for nvmlDeviceGetHandleByIndex and nvmlDeviceGetName calls
suggested fix
+ result = nvmlDeviceGetHandleByIndex(i, &device);
if (result != NVML_SUCCESS) {
+ fprintf(stderr, "Failed to get device handle: %s\n", nvmlErrorString(result));
+ continue;
}
+ result = nvmlDeviceGetName(device, name, sizeof(name));
if (result != NVML_SUCCESS) {
+ fprintf(stderr, "Failed to get device name: %s\n", nvmlErrorString(result));
+ continue;
}
diff block
const API_URL = process.env.NEXT_PUBLIC_API_URL;
-const accessTokenKeytar = new Keytar("access-token");
-const refreshTokenKeytar = new Keytar("refresh-token");
-
-const getRefreshTokenBody = async () => {
- if (IS_DESKTOP) {
- const accessToken = await accessTokenKeytar.getPassword();
- const refreshToken = await refreshTokenKeytar.getPassword();
-
- return { accessToken, refreshToken };
- }
-
- return null;
-};
-
const refreshToken: BeforeRequestHook = async (request) => {
if (IS_BROWSER) {
- const tokenExpirationTimestamp = getCookie("tokenExpirationTimestamp");
+ const { tokenExpirationTimestamp, setTokenExpirationTimestamp, ...auth } =
+ useAuthStore.getState();
if (tokenExpirationTimestamp) {
- if (
+ const isTokenExpired =
Number(tokenExpirationTimestamp) <
- Date.now() - ACCESS_TOKEN_EXPIRATION_OFFSET_IN_MS
- ) {
+ Date.now() - ACCESS_TOKEN_EXPIRATION_OFFSET_IN_MS;
+
+ if (isTokenExpired) {
const { expiresIn, accessToken, refreshToken } = await ky
.post(`${API_URL}/auth/refresh`, {
credentials: "include",
- json: await getRefreshTokenBody(),
+ json: IS_DESKTOP ? { refreshToken: auth.refreshToken } : undefined,
})
Greptile
greptile
style: Missing error handling for failed refresh token requests. Could lead to infinite refresh attempts.
diff block
}
}
+/// Returns the skipped timestamps and the final timestamp to evaluate the alert.
+/// `tz_offset` is in minutes
+/// Frequency is in seconds
+fn get_skipped_timestamps(
+ supposed_to_run_at: i64,
+ cron: &str,
+ tz_offset: i32,
+ frequency: i64,
+ delay: i64,
+) -> (Vec<i64>, i64) {
+ let mut skipped_timestamps = Vec::new();
+ let mut next_run_at;
+ if !cron.is_empty() {
+ let cron = Schedule::from_str(cron).unwrap();
+ let suppposed_to_run_at_dt = DateTime::from_timestamp_micros(supposed_to_run_at).unwrap();
+ let suppposed_to_run_at_dt =
+ suppposed_to_run_at_dt.with_timezone(&FixedOffset::east_opt(tz_offset * 60).unwrap());
+ next_run_at = cron
+ .after(&suppposed_to_run_at_dt)
+ .next()
+ .unwrap()
+ .timestamp_micros();
+ while next_run_at <= supposed_to_run_at + delay {
+ skipped_timestamps.push(next_run_at);
+ let suppposed_to_run_at_dt = DateTime::from_timestamp_micros(next_run_at).unwrap();
+ let suppposed_to_run_at_dt = suppposed_to_run_at_dt
+ .with_timezone(&FixedOffset::east_opt(tz_offset * 60).unwrap());
+ next_run_at = cron
+ .after(&suppposed_to_run_at_dt)
+ .next()
+ .unwrap()
+ .timestamp_micros();
+ }
+ } else {
+ next_run_at = TriggerCondition::align_time(
+ supposed_to_run_at + second_micros(frequency),
+ tz_offset,
+ frequency,
+ );
Greptile
greptile
logic: Missing error handling for align_time call. Could panic if frequency is invalid.
diff block
async versionInfo(): Promise<VersionInfo> {
return this.adminService.getVersionInfo();
}
+
+ // Database Config Variables
+ // needs rework, this is probably not the best place to keep this?
+ // can refactor admin panel resolver into multiple resolvers
+
+ @UseGuards(WorkspaceAuthGuard, UserAuthGuard, AdminPanelGuard)
+ @Query(() => ConfigVariable)
+ async getDatabaseConfigVariable(
+ @Args('key', { type: () => String }) key: keyof ConfigVariables,
+ ): Promise<ConfigVariable> {
+ return this.adminService.getConfigVariable(key);
+ }
+
+ @UseGuards(WorkspaceAuthGuard, UserAuthGuard, AdminPanelGuard)
+ @Mutation(() => Boolean)
+ async createDatabaseConfigVariable(
+ @Args('key', { type: () => String }) key: keyof ConfigVariables,
+ @Args('value', { type: () => GraphQLJSON })
+ value: ConfigVariables[keyof ConfigVariables],
+ ): Promise<boolean> {
+ await this.twentyConfigService.set(key, value);
+
+ return true;
Greptile
greptile
logic: Missing error handling for set operation - should catch and handle potential errors like in updateWorkspaceFeatureFlag
diff block
baseURL: (this as any).baseURL ?? '',
params: body,
httpStatus: error?.status ? error.status : 500,
- usage: {
- inputTokens: 0,
- outputTokens: 0,
- },
+ usage: { inputTokens: 0, outputTokens: 0 },
isError: true,
error: JSON.stringify(error),
})
- passThroughStream.emit('error', error)
}
})()
+
+ // Return the other stream to the user
+ return stream2
}
- return passThroughStream as unknown as Stream<ChatCompletionChunk>
+ return value
}) as APIPromise<Stream<ChatCompletionChunk>>
Greptile
greptile
logic: missing error handling for case where 'tee' exists but fails - should wrap in try/catch
diff block
});
}
- public identify(user: UserMetadata) {
+ public async identify(user: UserMetadata) {
if (this.mixpanel && this.id) {
if (user.id !== this.id) {
this.mixpanel.alias(user.id, this.id);
PersistentStorage.USER_SETTINGS.update({ id: user.id });
}
+ const { success, data } = await checkSubscription();
Greptile
greptile
logic: missing error handling for checkSubscription() call. Could throw if network fails or response is malformed.
diff block
model: openai('gpt-4o-mini'),
});
-const fetchWeather = new Step({
+const forecastSchema = z.object({
+ date: z.string(),
+ maxTemp: z.number(),
+ minTemp: z.number(),
+ precipitationChance: z.number(),
+ condition: z.string(),
+ location: z.string(),
+});
+
+function getWeatherCondition(code: number): string {
+ const conditions: Record<number, string> = {
+ 0: 'Clear sky',
+ 1: 'Mainly clear',
+ 2: 'Partly cloudy',
+ 3: 'Overcast',
+ 45: 'Foggy',
+ 48: 'Depositing rime fog',
+ 51: 'Light drizzle',
+ 53: 'Moderate drizzle',
+ 55: 'Dense drizzle',
+ 61: 'Slight rain',
+ 63: 'Moderate rain',
+ 65: 'Heavy rain',
+ 71: 'Slight snow fall',
+ 73: 'Moderate snow fall',
+ 75: 'Heavy snow fall',
+ 95: 'Thunderstorm',
+ };
+ return conditions[code] || 'Unknown';
+}
+
+const fetchWeather = createStep({
id: 'fetch-weather',
description: 'Fetches weather forecast for a given city',
inputSchema: z.object({
city: z.string().describe('The city to get the weather for'),
}),
- execute: async ({ context }) => {
- const triggerData = context?.getStepResult<{ city: string }>('trigger');
-
- if (!triggerData) {
- throw new Error('Trigger data not found');
+ outputSchema: forecastSchema,
+ execute: async ({ inputData }) => {
+ if (!inputData) {
+ throw new Error('Input data not found');
}
- const geocodingUrl = `https://geocoding-api.open-meteo.com/v1/search?name=${encodeURIComponent(triggerData.city)}&count=1`;
+ const geocodingUrl = `https://geocoding-api.open-meteo.com/v1/search?name=${encodeURIComponent(inputData.city)}&count=1`;
const geocodingResponse = await fetch(geocodingUrl);
- const geocodingData = await geocodingResponse.json();
+ const geocodingData = (await geocodingResponse.json()) as {
Greptile
greptile
style: Missing error handling for failed geocoding API request
diff block
+import React, { useMemo, useState } from 'react'
+import { ScrollView, StyleProp, ViewStyle } from 'react-native'
+
+import { getDisplayOrderQuestions, SurveyAppearanceTheme } from '../surveys-utils'
+import { Survey, SurveyAppearance, SurveyQuestion } from '../../../../posthog-core/src/posthog-surveys-types'
+import { LinkQuestion, MultipleChoiceQuestion, OpenTextQuestion, RatingQuestion } from './QuestionTypes'
+import { PostHog } from '../../posthog-rn'
+import { usePostHog } from '../../hooks/usePostHog'
+
+const getSurveyInteractionProperty = (survey: Survey, action: string): string => {
+ let surveyProperty = `$survey_${action}/${survey.id}`
+ if (survey.current_iteration && survey.current_iteration > 0) {
+ surveyProperty = `$survey_${action}/${survey.id}/${survey.current_iteration}`
+ }
+
+ return surveyProperty
+}
+
+export const sendSurveyShownEvent = (survey: Survey, posthog: PostHog): void => {
+ posthog.capture('survey shown', {
+ $survey_name: survey.name,
+ $survey_id: survey.id,
+ $survey_iteration: survey.current_iteration,
+ $survey_iteration_start_date: survey.current_iteration_start_date,
+ })
+}
+
+export const sendSurveyEvent = (
+ responses: Record<string, string | number | string[] | null> = {},
+ survey: Survey,
+ posthog: PostHog
+): void => {
+ posthog.capture('survey sent', {
+ $survey_name: survey.name,
+ $survey_id: survey.id,
+ $survey_iteration: survey.current_iteration,
+ $survey_iteration_start_date: survey.current_iteration_start_date,
+ $survey_questions: survey.questions.map((question) => question.question),
+ ...responses,
+ $set: {
+ [getSurveyInteractionProperty(survey, 'responded')]: true,
+ },
+ })
+}
+
+export const dismissedSurveyEvent = (survey: Survey, posthog: PostHog): void => {
+ posthog.capture('survey dismissed', {
+ $survey_name: survey.name,
+ $survey_id: survey.id,
+ $survey_iteration: survey.current_iteration,
+ $survey_iteration_start_date: survey.current_iteration_start_date,
+ $set: {
+ [getSurveyInteractionProperty(survey, 'dismissed')]: true,
+ },
+ })
+}
+
+export function Questions({
+ survey,
+ appearance,
+ styleOverrides,
+ onSubmit,
+}: {
+ survey: Survey
+ appearance: SurveyAppearanceTheme
+ styleOverrides?: StyleProp<ViewStyle>
+ onSubmit: () => void
+}): JSX.Element {
+ const [questionsResponses, setQuestionsResponses] = useState({})
+ const [currentQuestionIndex, setCurrentQuestionIndex] = useState(0)
+ const surveyQuestions = useMemo(() => getDisplayOrderQuestions(survey), [survey])
+ const posthog = usePostHog()
+
+ const onNextButtonClick = ({
+ res,
+ originalQuestionIndex,
+ displayQuestionIndex,
+ }: {
+ res: string | string[] | number | null
+ originalQuestionIndex: number
+ displayQuestionIndex: number
+ }): void => {
+ const responseKey = originalQuestionIndex === 0 ? `$survey_response` : `$survey_response_${originalQuestionIndex}`
+
+ setQuestionsResponses({ ...questionsResponses, [responseKey]: res })
+
+ const isLastDisplayedQuestion = displayQuestionIndex === survey.questions.length - 1
+ if (isLastDisplayedQuestion) {
+ sendSurveyEvent({ ...questionsResponses, [responseKey]: res }, survey, posthog)
+ onSubmit()
+ } else {
+ setCurrentQuestionIndex(displayQuestionIndex + 1)
+ }
+ }
+
+ const question = surveyQuestions[currentQuestionIndex]
+
+ return (
+ <ScrollView style={[styleOverrides, { flexGrow: 0 }]}>
+ {getQuestionComponent({
+ question,
+ appearance,
+ onSubmit: (res) =>
+ onNextButtonClick({
+ res,
+ originalQuestionIndex: question.originalQuestionIndex,
+ displayQuestionIndex: currentQuestionIndex,
+ }),
+ })}
+ </ScrollView>
+ )
+}
+
+type GetQuestionComponentProps = {
+ question: SurveyQuestion
+ appearance: SurveyAppearance
+ onSubmit: (res: string | string[] | number | null) => void
+}
+
+const getQuestionComponent = (props: GetQuestionComponentProps): JSX.Element => {
+ const questionComponents = {
+ open: OpenTextQuestion,
+ link: LinkQuestion,
+ rating: RatingQuestion,
+ multiple_choice: MultipleChoiceQuestion,
+ single_choice: MultipleChoiceQuestion,
+ }
+
+ const Component = questionComponents[props.question.type]
+
+ return <Component key={props.question.originalQuestionIndex} {...(props as any)} />
Greptile
greptile
logic: missing error handling if Component is undefined (invalid question type). Should validate props.question.type against available components
```suggestion
    const Component = questionComponents[props.question.type]

    if (!Component) {
        throw new Error(`Unsupported question type: ${props.question.type}`)
    }

    return <Component key={props.question.originalQuestionIndex} {...(props as any)} />
```
diff block
+import { mkdirSync } from 'node:fs'
+import { dirname, resolve } from 'node:path'
+
+import { expect, Page, test as base } from '@playwright/test'
+import { urls } from 'scenes/urls'
+
+declare module '@playwright/test' {
+ interface Page {
+ // resetCapturedEvents(): Promise<void>
+ //
+ // capturedEvents(): Promise<CaptureResult[]>
+ //
+ // waitingForNetworkCausedBy: (urlPatterns: (string | RegExp)[], action: () => Promise<void>) => Promise<void>
+ //
+ // expectCapturedEventsToBe(expectedEvents: string[]): Promise<void>
+ }
+}
+
+export const test = base.extend<{ loginBeforeTests: void; page: Page }>({
+ page: async ({ page }, use) => {
+ // // Add custom methods to the page object
+ // page.resetCapturedEvents = async function () {
+ // await this.evaluate(() => {
+ // ;(window as WindowWithPostHog).capturedEvents = []
+ // })
+ // }
+ // page.capturedEvents = async function () {
+ // return this.evaluate(() => {
+ // return (window as WindowWithPostHog).capturedEvents || []
+ // })
+ // }
+ // page.waitingForNetworkCausedBy = async function (
+ // urlPatterns: (string | RegExp)[],
+ // action: () => Promise<void>
+ // ) {
+ // const responsePromises = urlPatterns.map((urlPattern) => {
+ // return this.waitForResponse(urlPattern)
+ // })
+ //
+ // await action()
+ //
+ // // eslint-disable-next-line compat/compat
+ // await Promise.allSettled(responsePromises)
+ // }
+ // page.expectCapturedEventsToBe = async function (expectedEvents: string[]) {
+ // const capturedEvents = await this.capturedEvents()
+ // expect(capturedEvents.map((x) => x.event)).toEqual(expectedEvents)
+ // }
+
+ // Pass the extended page to the test
+ await use(page)
+ },
+ // this auto fixture makes sure we log in before every test
+ loginBeforeTests: [
+ async ({ page }, use) => {
+ const authFile = resolve('playwright/.auth/user.json')
+
+ mkdirSync(dirname(authFile), { recursive: true }) // Ensure directory exists
+
+ // perform authentication steps
+ await page.goto(urls.login())
+
+ // Wait for either login input OR the authenticated UI element
+ const loginField = page.getByPlaceholder('email@yourcompany.com')
+ const homepageMenuItem = page.locator('[data-attr="menu-item-projecthomepage"]')
+
+ const firstVisible = await Promise.race([
+ loginField.waitFor({ timeout: 5000 }).then(() => 'login'),
+ homepageMenuItem.waitFor({ timeout: 5000 }).then(() => 'authenticated'),
+ ]).catch(() => 'timeout')
+
+ if (firstVisible === 'login') {
+ // Not logged in, proceed with login
+ await loginField.fill('test@posthog.com')
+
+ const passwd = page.getByPlaceholder('••••••••••')
+ await expect(passwd).toBeVisible()
+ await passwd.fill('12345678')
+
+ await page.getByRole('button', { name: 'Log in' }).click()
+
+ // Wait for login confirmation
+ await homepageMenuItem.waitFor()
Greptile
greptile
logic: Missing error handling if homepageMenuItem.waitFor() fails after login. Could result in unclear test failures.
diff block
}
// Instantiate the LiteLLM Client
- let litellm_client = LiteLLMClient::default();
+ let litellm_client = LiteLLMClient::new(
+ std::env::var("OPENAI_API_KEY").ok(),
+ Some("https://api.openai.com/v1/".to_string()),
+ );
Greptile
greptile
logic: Missing error handling if OPENAI_API_KEY is not set. The .ok() silently ignores missing env var which could cause runtime issues.
suggested fix
let litellm_client = LiteLLMClient::new(
+ std::env::var("OPENAI_API_KEY").context("OPENAI_API_KEY environment variable not set")?,
Some("https://api.openai.com/v1/".to_string()),
);
diff block
+#!/bin/bash
+# This script can be a starting point to convert
+# PLATFORM_RELATIONSHIPS into another environment variable
+# Many CMS systems use a DATABASE_URL to connect to the database
+# Feel free to use this as inspiration
+
+getRelationshipInfo() {
+ RELATIONSHIP_NAME="$1"
+ PROPERTY="$2"
+ JQ_STR="to_entries[] | select(.key==\"$RELATIONSHIP_NAME\") | .value[].$PROPERTY"
+ CMD="echo $PLATFORM_RELATIONSHIPS | base64 -d | jq -r '$JQ_STR'"
+ eval $CMD
+}
+
+
+# choose the name of the relationship to parse, feel free to alter this
+RELATIONSHIP="postgresdatabase"
+DB_DATABASE="main"
+
+# Extract the information we need
+DB_TYPE=$(getRelationshipInfo "$RELATIONSHIP" 'scheme')
+DB_USERNAME=$(getRelationshipInfo "$RELATIONSHIP" 'username')
+DB_HOST=$(getRelationshipInfo "$RELATIONSHIP" 'host')
+DB_PASS=$(getRelationshipInfo "$RELATIONSHIP" 'password')
+DB_PORT=$(getRelationshipInfo "$RELATIONSHIP" 'port')
Greptile
greptile
style: Missing error handling if getRelationshipInfo fails to extract values. Add checks for empty/null values.
diff block
+import {
+ DelegateComponent,
+ DELEGATION_PROGRAM_ID,
+} from "../../clients/bolt-sdk/lib";
+import { expect } from "chai";
+
+export function acceleration(framework) {
+ describe("Acceleration", () => {
+ it("Check component delegation to accelerator", async () => {
+ const delegateComponent = await DelegateComponent({
+ payer: framework.provider.wallet.publicKey,
+ entity: framework.entity1Pda,
+ componentId: framework.exampleComponentPosition.programId,
+ });
Greptile
greptile
logic: Missing error handling for invalid inputs (e.g., non-existent entity or component). Should add test cases for failure scenarios.
diff block
+/* eslint-disable no-console */
+require('dotenv').config();
+const fs = require('fs');
+const path = require('path');
+
+const routesManifestPath = path.resolve(__dirname, '../.next/routes-manifest.json');
+const originalPath = path.resolve(__dirname, '../.next/routes-manifest-orig.json');
+const originalManifest = require(originalPath);
+
+const API_PATH = '/api/:path*';
+const TRACKER_SCRIPT = '/script.js';
+
+const collectApiEndpoint = process.env.COLLECT_API_ENDPOINT;
+const trackerScriptName = process.env.TRACKER_SCRIPT_NAME;
+
+const headers = [];
+const rewrites = [];
+
+if (collectApiEndpoint) {
+ const apiRoute = originalManifest.headers.find((route) => route.source === API_PATH);
+ const routeRegex = new RegExp(apiRoute.regex);
Greptile
greptile
logic: Missing error handling if apiRoute is undefined. Could throw if API_PATH not found in headers. ```suggestion const apiRoute = originalManifest.headers.find((route) => route.source === API_PATH); + if (!apiRoute) throw new Error(`Route ${API_PATH} not found in manifest headers`); const routeRegex = new RegExp(apiRoute.regex); ```
diff block
+/* eslint-disable no-console */
+require('dotenv').config();
+const fs = require('fs');
+const path = require('path');
+
+const routesManifestPath = path.resolve(__dirname, '../.next/routes-manifest.json');
+const originalPath = path.resolve(__dirname, '../.next/routes-manifest-orig.json');
+const originalManifest = require(originalPath);
+
+const API_PATH = '/api/:path*';
+const TRACKER_SCRIPT = '/script.js';
+
+const collectApiEndpoint = process.env.COLLECT_API_ENDPOINT;
+const trackerScriptName = process.env.TRACKER_SCRIPT_NAME;
+
+const headers = [];
+const rewrites = [];
+
+if (collectApiEndpoint) {
+ const apiRoute = originalManifest.headers.find((route) => route.source === API_PATH);
+ const routeRegex = new RegExp(apiRoute.regex);
+
+ rewrites.push({
+ source: collectApiEndpoint,
+ destination: '/api/send',
+ });
+
+ if (!routeRegex.test(collectApiEndpoint)) {
+ headers.push({
+ source: collectApiEndpoint,
+ headers: apiRoute.headers,
+ });
+ }
+}
+
+if (trackerScriptName) {
+ const trackerRoute = originalManifest.headers.find((route) => route.source === TRACKER_SCRIPT);
Greptile
greptile
logic: Missing error handling if trackerRoute is undefined. Could throw if TRACKER_SCRIPT not found in headers. ```suggestion const trackerRoute = originalManifest.headers.find((route) => route.source === TRACKER_SCRIPT); + if (!trackerRoute) throw new Error(`Route ${TRACKER_SCRIPT} not found in manifest headers`); ```
diff block
}
return parse(stderr, config.score);
},
+
+ async kattis(config) {
+ const { files, code } = await runQueued(`${config.execute} input answer_file feedback_dir`, {
+ copyIn: {
+ input: config.input,
+ answer_file: config.output,
+ 'feedback_dir/placeholder': { content: '' },
+ ...config.copyIn,
+ },
+ stdin: config.user_stdout,
+ copyOut: [
+ 'feedback_dir/score.txt?',
+ 'feedback_dir/judgemessage.txt?',
+ 'feedback_dir/teammessage.txt?',
+ 'feedback_dir/judgeerror.txt?',
+ ],
+ });
+
+ let status = STATUS.STATUS_SYSTEM_ERROR;
+
+ if (code === 42) {
+ status = STATUS.STATUS_ACCEPTED;
+ } else if (code === 43) {
+ status = STATUS.STATUS_WRONG_ANSWER;
+ }
+
+ const score = status === STATUS.STATUS_ACCEPTED
+ ? config.score
+ : +files['feedback_dir/score.txt'] || (0);
Greptile
greptile
logic: Potential issue with score.txt parsing - missing error handling if file exists but contains invalid number.
diff block
let reopened = !res.is_empty();
if reopened {
metrics::counter!(ISSUE_REOPENED).increment(1);
- capture_issue_reopened(self.team_id, self.id)
+ capture_issue_reopened(self.team_id, self.id);
+ send_issue_reopened_alert(context, self);
Greptile
greptile
logic: Missing error handling for send_issue_reopened_alert - the Result is ignored
suggested fix
+ send_issue_reopened_alert(context, self)?;
diff block
+import { Action, ActionPanel, Grid, Icon, PopToRootType, showHUD, showToast, Toast } from "@raycast/api";
+import { useContext, useEffect, useState } from "react";
+
+import * as api from "../../api/api";
+import * as oauth from "../../api/oauth";
+import { getWatchlistItems, SetEpisodesWatched } from "./utils";
+import { ViewTypeCtx } from "../ViewTypeCtx";
+
+export function ManageWatchGrid() {
+ const [isLoading, setIsLoading] = useState<boolean>(true);
+ const [items, setItems] = useState<(api.ExtendedAnime & { status: string; episodesWatched: number })[]>([]);
+
+ const { setViewType } = useContext(ViewTypeCtx);
+
+ useEffect(() => {
+ (async () => {
+ try {
+ await oauth.authorize();
+
+ const fetchedItems = await getWatchlistItems();
+
+ setItems(fetchedItems);
+ setIsLoading(false);
+ } catch (error) {
+ console.error(error);
+ setIsLoading(false);
+ showToast({ style: Toast.Style.Failure, title: String(error) });
+ }
+ })();
+ }, []);
+
+ return (
+ <Grid isLoading={isLoading} columns={5} aspectRatio="2/3" fit={Grid.Fit.Fill}>
+ {items.map((anime) => (
+ <Grid.Item
+ key={anime.id}
+ content={anime.main_picture.large}
+ title={anime.title}
+ subtitle={`${anime.episodesWatched}/${anime.num_episodes}`}
+ actions={
+ <ActionPanel>
+ <Action.OpenInBrowser url={`https://myanimelist.net/anime/${anime.id}`} />
+ <Action
+ title="Switch to List View"
+ onAction={() => {
+ setViewType("list");
+ }}
+ icon={Icon.List}
+ shortcut={{ modifiers: ["cmd", "shift"], key: "enter" }}
+ />
+ <Action
+ title={"Remove from Watchlist"}
+ onAction={async () => {
+ if (!(await api.alertRemoveAnime(anime))) return;
+ await api.removeAnime(anime);
+ api.removeCachedWatchlist();
+ await showHUD("Removed from Watchlist", {
+ popToRootType: PopToRootType.Immediate,
+ });
+ }}
Greptile
greptile
style: Missing error handling for removeAnime operation. Consider adding try/catch similar to the incrementEpisodes action
suggested fix
onAction={async () => {
                   try {
                     if (!(await api.alertRemoveAnime(anime))) return;
                     await api.removeAnime(anime);
                     api.removeCachedWatchlist();
                     await showHUD("Removed from Watchlist", {
                       popToRootType: PopToRootType.Immediate,
                     });
                   } catch (error) {
                     console.error(error);
                     showToast({ style: Toast.Style.Failure, title: String(error) });
                   }
                 }}
diff block
+import requests
+import itertools
+
+flatten = itertools.chain.from_iterable
+
+
+def get_prefixes_from_url(url):
+ data = requests.get(url).json()
Greptile
greptile
logic: Missing error handling for network requests. If the API is unavailable or returns a non-JSON response, this will crash.
diff block
+import { open } from "@raycast/api";
+
+type Input = {
+ /**
+ * The path which either points to a directory, Xcode Project, Swift Package (Package.swift), Xcode Workspace, Swift Playground or a file.
+ */
+ path: string;
+};
+
+/**
+ * Opens a path which either points to a directory, Xcode Project, Swift Package (Package.swift), Xcode Workspace, Swift Playground or a file.
+ * @param input The input.
+ */
+export default (input: Input) => open(input.path);
Greptile
greptile
logic: Missing error handling for invalid paths or when open() fails
suggested fix
+export default async (input: Input) => {
+ try {
+ await open(input.path);
+ } catch (error) {
+ await showFailureToast("Failed to open path", error);
+ }
};
diff block
+-- Alarm notification popup script
+
+on run argv
+ set alarmTitle to "Raycast Alarm"
+
+ -- Check if we have at least one argument (the alarm title)
+ if (count of argv) > 0 then
+ set alarmTitle to item 1 of argv
+ end if
+
+ -- Create a dialog with bell emoji in the title
+ set dialogResult to display dialog alarmTitle buttons {"Stop"} default button "Stop" with title "⏰ Alarm"
+
+ -- Return the result of the dialog
+ if button returned of dialogResult is "Stop" then
+ return "stop"
+ end if
Greptile
greptile
logic: Missing error handling if the dialog is dismissed without clicking Stop
suggested fix
if button returned of dialogResult is "Stop" then
return "stop"
+ else
+ return "dismissed"
end if
diff block
+#!/bin/bash
+
+# Test utilities for Raycast Alarms Extension
+
+# Create a test environment with isolated configuration
+TEST_CONFIG_DIR="/tmp/raycast-alarms-test"
+ORIGINAL_CONFIG_DIR="$HOME/.raycast-alarms"
+SCRIPTS_DIR="$(pwd)/assets/scripts"
+
+# Colors for test output
+GREEN='\033[0;32m'
+RED='\033[0;31m'
+YELLOW='\033[1;33m'
+BLUE='\033[0;34m'
+NC='\033[0m' # No Color
+BOLD='\033[1m'
+
+# Log a message to console with timestamp
+log_test() {
+ echo -e "${BLUE}[$(date '+%Y-%m-%d %H:%M:%S')]${NC} $1"
+}
+
+# Log a success message
+log_success() {
+ echo -e "${GREEN}[✓ PASS]${NC} $1"
+}
+
+# Log a failure message
+log_failure() {
+ echo -e "${RED}[✗ FAIL]${NC} $1"
+}
+
+# Log a warning message
+log_warning() {
+ echo -e "${YELLOW}[⚠️ WARNING]${NC} $1"
+}
+
+# Setup test environment
+setup_test_env() {
+ log_test "Setting up test environment..."
+
+ # Create test directories
+ mkdir -p "$TEST_CONFIG_DIR/scripts"
+ mkdir -p "$TEST_CONFIG_DIR/logs"
+ mkdir -p "$TEST_CONFIG_DIR/active"
+
+ # Copy scripts to test directory
+ cp "$SCRIPTS_DIR/manage-crontab.sh" "$TEST_CONFIG_DIR/scripts/"
+ cp "$SCRIPTS_DIR/trigger-alarm.sh" "$TEST_CONFIG_DIR/scripts/"
+ cp "$SCRIPTS_DIR/show-alarm-popup.applescript" "$TEST_CONFIG_DIR/scripts/"
+
+ # Make scripts executable
+ chmod +x "$TEST_CONFIG_DIR/scripts/manage-crontab.sh"
+ chmod +x "$TEST_CONFIG_DIR/scripts/trigger-alarm.sh"
+
+ # Compile AppleScript for testing
+ osacompile -o "$TEST_CONFIG_DIR/scripts/show-alarm-popup.scpt" "$TEST_CONFIG_DIR/scripts/show-alarm-popup.applescript"
Greptile
greptile
logic: Missing error handling for osacompile failure which could break tests
diff block
}
} catch (error) {
didError = true;
- console.log('\n\n' + error.message);
- console.log(file);
}
});
Greptile
greptile
logic: Missing error handling - script should exit with non-zero status when errors occur
suggested fix
+});
+process.exit(didError ? 1 : 0);
diff block
+import { Action, ActionPanel, List, getPreferenceValues } from "@raycast/api";
+import { useFetch } from "@raycast/utils";
+import { useState } from "react";
+
+// Helper function to convert string to title case
+function toTitleCase(text?: string): string {
+ if (!text) return "";
+ return text
+ .toLowerCase()
+ .split("_")
+ .map((word) => word.charAt(0).toUpperCase() + word.slice(1))
+ .join(" ");
+}
+
+// Helper function to convert HTML snippet to markdown
+function convertHtmlToMarkdown(html?: string): string {
+ if (!html) return "";
+
+ // Remove <p> tags but keep their content
+ let markdown = html.replace(/<\/?p>/g, "");
+
+ // Convert <strong> tags to markdown bold **
+ markdown = markdown.replace(/<strong>(.*?)<\/strong>/g, "**$1** ");
+
+ return markdown;
+}
+
+// Helper function to format breadcrumbs
+function formatBreadcrumbs(breadcrumb?: string, contentCategory?: string): string {
+ if (!breadcrumb) return contentCategory || "";
+
+ const parts = breadcrumb.split(" / ");
+ // Skip the first 3 parts (docs / api / etc)
+ const relevantParts = parts.slice(3);
+
+ if (relevantParts.length === 0) return contentCategory || "";
+
+ // Convert each part to title case
+ const formattedParts = relevantParts.map((part) => toTitleCase(part));
+
+ // Join with > and prefix with content_category
+ return `${contentCategory || ""} > ${formattedParts.join(" > ")}`;
+}
+
+type Hit = {
+ title: string;
+ url: string;
+ gid: string;
+ highlights?: string[];
+ type?: string;
+ snippet?: string;
+ content_category?: string;
+ object_label?: string;
+ markdown?: string;
+ version?: string;
+ breadcrumb?: string;
+ pretty_breadcrumbs?: string;
+ icon?: {
+ source: string;
+ tooltip?: string;
+ };
+};
+
+type ContentCategory = {
+ content_category: string;
+ count: number;
+};
+
+type CategoryOption = {
+ id: string;
+ name: string;
+ count?: number;
+};
+
+type APIResponse = {
+ results?: Hit[];
+ hit_counts_by_content_category?: ContentCategory[];
+};
+
+function CategoryDropdown(props: { categories: CategoryOption[]; onCategoryChange: (newValue: string) => void }) {
+ const { categories, onCategoryChange } = props;
+
+ return (
+ <List.Dropdown
+ tooltip="Select Category"
+ storeValue={true}
+ onChange={(newValue) => {
+ onCategoryChange(newValue);
+ }}
+ >
+ <List.Dropdown.Section title="Content Categories">
+ {categories.map((category) => (
+ <List.Dropdown.Item
+ key={category.id}
+ title={category.count ? `${category.name} (${category.count})` : category.name}
+ value={category.id}
+ />
+ ))}
+ </List.Dropdown.Section>
+ </List.Dropdown>
+ );
+}
+
+export default function Command() {
+ const { version } = getPreferenceValues<{ version: string }>();
+ const [query, setQuery] = useState("");
+ const [categories, setCategories] = useState<CategoryOption[]>([{ id: "all", name: "All Categories" }]);
+ const [selectedCategory, setSelectedCategory] = useState<string>("all");
+
+ // Fetch to get categories and initial results
+ const { isLoading, data } = useFetch<APIResponse>(
+ query ? `https://shopify.dev/search/autocomplete?query=${encodeURIComponent(query)}` : "",
+ {
+ keepPreviousData: true,
+ execute: Boolean(query),
+ parseResponse: async (response) => (await response.json()) as APIResponse,
Greptile
greptile
logic: Missing error handling for failed API requests. Consider using showFailureToast from @raycast/utils ```suggestion const { isLoading, data } = useFetch<APIResponse>( query ? `https://shopify.dev/search/autocomplete?query=${encodeURIComponent(query)}` : "", { keepPreviousData: true, execute: Boolean(query), + onError: (error) => showFailureToast(error, { title: "Failed to fetch search results" }), parseResponse: async (response) => (await response.json()) as APIResponse, ```
diff block
+#!/bin/bash
+
+# Fix only Prettier issues without running other lint checks
+
+echo "Fixing Prettier formatting issues..."
+
+# Find all TypeScript/React files
+TS_FILES=$(find src -type f -name "*.ts" -o -name "*.tsx")
+
+# Run Prettier directly on the files
+npx prettier --write $TS_FILES
Greptile
greptile
logic: Missing error handling for npx command failure. Should check exit code and report errors
diff block
+import { getPreferenceValues } from "@raycast/api";
+import fetch from "node-fetch";
+
+interface Preferences {
+ apiKey: string;
+}
+
+interface Account {
+ id: string;
+ name: string;
+ nickname: string | null;
+ currentBalance: number;
+ availableBalance: number;
+ kind: string;
+ status: string;
+ createdAt: string;
+ legalBusinessName: string;
+}
+
+interface Transaction {
+ id: string;
+ amount: number;
+ counterpartyName: string;
+ createdAt: string;
+ status: string;
+ kind: string;
+}
+
+type Input = {
+ /**
+ * The time period to analyze: "month", "quarter", or "year"
+ */
+ period?: "month" | "quarter" | "year";
+};
+
+const API_BASE_URL = "https://api.mercury.com/api/v1/";
+
+/**
+ * Analyzes financial data from Mercury accounts and provides insights.
+ * Calculates metrics like cash flow, top expenses, and income sources for the specified period.
+ */
+export default async function (input: Input = {}) {
+ const { apiKey } = getPreferenceValues<Preferences>();
+ const { period = "month" } = input;
+
+ try {
+ // Fetch accounts
+ const accountsResponse = await fetch(`${API_BASE_URL}/accounts`, {
+ method: "GET",
+ headers: {
+ accept: "application/json",
+ Authorization: `Bearer ${apiKey}`,
+ },
+ });
+
+ if (!accountsResponse.ok) {
+ throw new Error(`Failed to fetch accounts: ${accountsResponse.statusText}`);
+ }
+
+ const accountsData = (await accountsResponse.json()) as {
+ accounts: Account[];
+ };
+ const accounts = accountsData.accounts;
+
+ // Calculate date range based on period
+ const startDate = new Date();
+
+ switch (period) {
+ case "month":
+ startDate.setMonth(startDate.getMonth() - 1);
+ break;
+ case "quarter":
+ startDate.setMonth(startDate.getMonth() - 3);
+ break;
+ case "year":
+ startDate.setFullYear(startDate.getFullYear() - 1);
+ break;
+ }
+
+ // Fetch transactions for each account within date range
+ const allTransactions: Transaction[] = [];
+ for (const account of accounts) {
+ const txResponse = await fetch(
+ `${API_BASE_URL}/account/${account.id}/transactions?limit=500&start=${startDate.toISOString().split("T")[0]}`,
+ {
+ method: "GET",
+ headers: {
+ accept: "application/json",
+ Authorization: `Bearer ${apiKey}`,
+ },
+ },
+ );
+
+ if (txResponse.ok) {
+ const txData = (await txResponse.json()) as {
+ transactions: Transaction[];
+ };
+ allTransactions.push(...txData.transactions);
+ }
Greptile
greptile
logic: Missing error handling for failed transaction requests. If txResponse is not ok, the error is silently ignored and those transactions are omitted from analysis, potentially leading to incomplete or misleading financial metrics. ```suggestion if (txResponse.ok) { const txData = (await txResponse.json()) as { transactions: Transaction[]; }; allTransactions.push(...txData.transactions); + } else { + throw new Error(`Failed to fetch transactions for account ${account.id}: ${txResponse.statusText}`); } ```
diff block
+import { ActionPanel, Detail, List, Action } from "@raycast/api"
+import { useEffect, useState } from "react"
+import fetch from "node-fetch"
+
+type ItemType = {
+ dictid: number
+ dict: string
+ expl: string
+}
+
+export default function Command() {
+ const [items, setItems] = useState<ItemType[]>([])
+
+ useEffect(() => {
+ fetch("https://deerpark.app/api/v1/dict/lookup/如來藏")
+ .then((response) => response.json())
+ .then((json) =>
+ setItems(
+ json.data.map((item: ItemType) => {
+ return { ...item, expl: item.expl.replace(/<\/?p>/g, "") }
+ })
+ )
+ )
+ }, [])
Greptile
greptile
logic: Missing error handling for fetch request. Wrap in try-catch and use showFailureToast from @raycast/utils
suggested fix
useEffect(() => {
+ try {
fetch("https://deerpark.app/api/v1/dict/lookup/如來藏")
.then((response) => response.json())
.then((json) =>
setItems(
json.data.map((item: ItemType) => {
return { ...item, expl: item.expl.replace(/<\/?p>/g, "") }
})
)
)
+ .catch((error) => showFailureToast(error))
+ } catch (error) {
+ showFailureToast(error)
}
}, [])
diff block
+/**
+ * Template management module.
+ * Handles storage, validation, and operations for message templates.
+ */
+import { Toast, showToast } from "@raycast/api";
+import { environment } from "@raycast/api";
+import { homedir } from "os";
+import { join } from "path";
+import fs from "fs/promises";
+import { SlackTemplate } from "../types";
+import { showCustomToast } from "./slack";
+
+/** File system constants */
+const TEMPLATES_FILENAME = "slack-templates.json";
+const templatesFilePath = join(environment.supportPath, TEMPLATES_FILENAME);
+export const DEFAULT_TEMPLATE_PATH = join(homedir(), "Downloads", "slack-templates.json");
+
+/**
+ * Ensures the storage directory exists
+ * @throws Error if directory creation fails
+ */
+async function ensureStorageDirectory(): Promise<void> {
+ try {
+ await fs.mkdir(environment.supportPath, { recursive: true });
+ } catch (error) {
+ console.error("Failed to create storage directory:", error);
+ throw error;
+ }
+}
+
+/**
+ * Loads templates from the local storage file
+ * @returns Promise<SlackTemplate[]> Array of stored templates
+ * @throws Error if file reading fails
+ */
+async function loadTemplatesFromFile(): Promise<SlackTemplate[]> {
+ try {
+ await ensureStorageDirectory();
+ const data = await fs.readFile(templatesFilePath, "utf-8");
+ return JSON.parse(data);
+ } catch (error) {
+ if ((error as NodeJS.ErrnoException).code === "ENOENT") {
+ return [];
+ }
+ console.error("Failed to load templates from file:", error);
+ throw error;
+ }
+}
+
+/**
+ * Saves templates to the local storage file
+ * @param templates - Array of templates to save
+ * @throws Error if file writing fails
+ */
+async function saveTemplatesToFile(templates: SlackTemplate[]): Promise<void> {
+ try {
+ await ensureStorageDirectory();
+ await fs.writeFile(templatesFilePath, JSON.stringify(templates, null, 2));
+ } catch (error) {
+ console.error("Failed to save templates to file:", error);
+ throw error;
+ }
+}
+
+/**
+ * Validates the format of imported templates
+ * @param templates - Unknown data to validate
+ * @returns Promise<SlackTemplate[]> Validated templates array
+ * @throws Error if validation fails
+ */
+export async function validateTemplateFormat(templates: unknown): Promise<SlackTemplate[]> {
+ const importedTemplates = templates as SlackTemplate[];
+
+ const isValid = importedTemplates.every(
+ (template) =>
+ typeof template.name === "string" &&
+ typeof template.message === "string" &&
+ typeof template.slackChannelId === "string" &&
+ typeof template.slackChannelName === "string" &&
+ (template.threadTimestamp === undefined || typeof template.threadTimestamp === "string"),
+ );
+
+ if (!isValid) {
+ throw new Error("Invalid template format");
+ }
+
+ return importedTemplates;
+}
+
+/**
+ * Checks if a file exists at the specified path
+ * @param filePath - Path to check
+ * @returns Promise<boolean> True if file exists
+ */
+export async function checkFileExists(filePath: string): Promise<boolean> {
+ try {
+ await fs.access(filePath);
+ return true;
+ } catch (error) {
+ if (error instanceof Error && error.message.includes("ENOENT")) {
+ return false;
+ }
+ throw error;
+ }
+}
+
+/**
+ * Reads templates from a specified JSON file
+ * @param filePath - Path to the JSON file
+ * @returns Promise<SlackTemplate[]> Array of templates from file
+ * @throws Error if file reading or validation fails
+ */
+export async function readTemplatesFromFile(filePath: string): Promise<SlackTemplate[]> {
+ if (!filePath) {
+ throw new Error("Please enter a file path");
+ }
+
+ if (!filePath.toLowerCase().endsWith(".json")) {
+ throw new Error("Please select a JSON file");
+ }
+
+ const fileContent = await fs.readFile(filePath, "utf8");
+ const parsedContent = JSON.parse(fileContent);
+ return await validateTemplateFormat(parsedContent);
+}
+
+/**
+ * Writes templates to a specified file
+ * @param filePath - Path to write the file
+ * @param templates - Templates to write
+ */
+export async function writeTemplatesToFile(filePath: string, templates: SlackTemplate[]): Promise<void> {
+ await fs.writeFile(filePath, JSON.stringify(templates, null, 2));
+}
Greptile
greptile
style: Missing error handling for file writing operation. This function should be wrapped in a try/catch block like other file operations in this module.
diff block
+#!/bin/sh
+
+# Raycast Alarms Extension - Installation Script
+# Crafted with ♥ for developers who value precision and reliability
+
+# ANSI color codes for better visual hierarchy
+GREEN='\033[0;32m'
+BLUE='\033[0;34m'
+YELLOW='\033[1;33m'
+RED='\033[0;31m'
+NC='\033[0m' # No Color
+BOLD='\033[1m'
+
+# Helper function for printing beautifully formatted messages
+print_step() {
+ echo "${BOLD}${BLUE}[⚙️ Setup]${NC} $1"
+}
+
+print_success() {
+ echo "${GREEN}[✓ Success]${NC} $1"
+}
+
+print_info() {
+ echo "${BLUE}[ℹ️ Info]${NC} $1"
+}
+
+print_warning() {
+ echo "${YELLOW}[⚠️ Warning]${NC} $1"
+}
+
+print_error() {
+ echo "${RED}[❌ Error]${NC} $1"
+}
+
+# Print welcome banner
+echo "\n${BOLD}${BLUE}===============================================${NC}"
+echo "${BOLD}${BLUE} Raycast Alarms Extension Installer ${NC}"
+echo "${BOLD}${BLUE}===============================================${NC}"
+echo "${BLUE}Building your perfect alarm system, one script at a time${NC}\n"
+
+# Find script directory
+SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
+print_info "Installation source: ${BOLD}$SCRIPT_DIR${NC}"
+
+# Setup the config directory
+CONFIG_DIR="$HOME/.raycast-alarms"
+print_step "Creating workspace at ${BOLD}$CONFIG_DIR${NC}"
+
+# Create required directories
+mkdir -p "$CONFIG_DIR/scripts"
+mkdir -p "$CONFIG_DIR/logs"
+mkdir -p "$CONFIG_DIR/active"
Greptile
greptile
logic: Missing error handling for mkdir operations. Should check return status and exit if directory creation fails.
suggested fix
+mkdir -p "$CONFIG_DIR/scripts" || { print_error "Failed to create scripts directory"; exit 1; }
+mkdir -p "$CONFIG_DIR/logs" || { print_error "Failed to create logs directory"; exit 1; }
+mkdir -p "$CONFIG_DIR/active" || { print_error "Failed to create active directory"; exit 1; }
diff block
+import { LocalStorage, showToast, Toast } from "@raycast/api";
+import { useCallback, useEffect, useMemo, useState } from "react";
+import { Chat, SavedChat, SavedChatHook } from "../type";
+
+export function useSavedChat(): SavedChatHook {
+ const [data, setData] = useState<SavedChat[]>([]);
+ const [isLoading, setLoading] = useState<boolean>(true);
+
+ useEffect(() => {
+ (async () => {
+ const storedSavedChats = await LocalStorage.getItem<string>("savedChats");
+
+ if (storedSavedChats) {
+ setData((previous) => [...previous, ...JSON.parse(storedSavedChats)]);
+ }
+ setLoading(false);
+ })();
+ }, []);
+
+ useEffect(() => {
+ LocalStorage.setItem("savedChats", JSON.stringify(data));
+ }, [data]);
Greptile
greptile
style: Missing error handling when writing to LocalStorage. Consider using try/catch or showFailureToast from @raycast/utils.
suggested fix
useEffect(() => {
+ try {
LocalStorage.setItem("savedChats", JSON.stringify(data));
+ } catch (error) {
+ showFailureToast(error, { title: "Failed to save chats" });
}
}, [data]);
diff block
+import { $fetch } from "ofetch";
+
+/**
+ * Fetch the available categories for Nuxt modules
+ *
+ * Use this tool when:
+ * - You need to know what categories of modules are available
+ * - You want to explore modules by category
+ * - You need to recommend module categories based on user requirements
+ *
+ * @returns The list of available module categories
+ */
+export default async function tool() {
+ const { categories } = await $fetch("https://api.nuxt.com/modules/categories");
+ return categories as string[];
Greptile
greptile
logic: Missing error handling for failed API requests. Should wrap in try/catch and provide graceful error handling. ```suggestion export default async function tool() { + try { const { categories } = await $fetch("https://api.nuxt.com/modules/categories"); return categories as string[]; + } catch (error) { + throw new Error(`Failed to fetch module categories: ${error.message}`); + } ```
diff block
AccessKind::Public | AccessKind::Private => {
// Register new user
let user_id = Uuid::new_v4();
- msg!([ctx] user::msg::create(user_id) -> user::msg::create_complete {
- user_id: Some(user_id.into()),
- namespace_id: None,
- display_name: None,
- })
+ let mut creation_sub = chirp_workflow::compat::subscribe::<
+ ::user::workflows::user::CreateComplete, _
+ >(&ctx, ("user_id", user_id)).await?;
+
+ chirp_workflow::compat::workflow(
+ &ctx,
+ ::user::workflows::user::Input {
+ user_id,
+ display_name: None,
+ }
+ )
+ .await?
+ .tag("user_id", user_id)
+ .dispatch()
.await?;
+
+ creation_sub.next().await?;
Greptile
greptile
style: Missing error handling for subscription timeout - should add a timeout to prevent indefinite waiting
suggested fix
+ tokio::time::timeout(
+ std::time::Duration::from_secs(30),
+ creation_sub.next()
+ ).await?.map_err(|_| err_code!(TIMED_OUT, msg = "user creation timed out"))?;
diff block
+import { ActionPanel, Detail, List, Action } from "@raycast/api"
+import { useEffect, useState } from "react"
+import fetch from "node-fetch"
+
+type ItemType = {
+ id: string
+ title: string
+ byline: string
+ juans: [number]
+ juan: number
+ chars: number
+ url: string
+}
+
+export default function Command() {
+ const [items, setItems] = useState<ItemType[]>([])
+
+ useEffect(() => {
+ fetch("https://deerpark.app/api/v1/readinglist/home")
+ .then((response) => response.json())
+ .then((json) =>
+ setItems(
+ json.items.map((item: ItemType) => {
+ // item.url = `https://deerpark.app/reader/${item.id}/${item.juans[0]}`
+ return { title: item.title, url: item.url }
+ })
+ )
+ )
+ }, [])
Greptile
greptile
logic: Missing error handling for fetch. Use try/catch and showFailureToast from @raycast/utils ```suggestion useEffect(() => { + async function fetchItems() { + try { + const response = await fetch("https://deerpark.app/api/v1/readinglist/home"); + const json = await response.json(); setItems( json.items.map((item: ItemType) => { // item.url = `https://deerpark.app/reader/${item.id}/${item.juans[0]}` return { title: item.title, url: item.url } }) + ); + } catch (error) { + showFailureToast("Failed to fetch reading list", error); } } + fetchItems(); }, []) ```
diff block
import "./util/fetchPolyfill";
-import { useState, useEffect } from "react";
-import { ActionPanel, Action, Detail, Icon, List, showToast, Toast, useNavigation } from "@raycast/api";
-import { setActiveSite, Site } from "./api/site";
+import { ActionPanel, Action, Detail, Icon, List, useNavigation } from "@raycast/api";
+import { setActiveSite } from "./api/site";
import { fetchSites } from "./api/atlassian";
import { authorizeSite } from "./api/auth";
+import { usePromise } from "@raycast/utils";
export default function Command() {
const { pop } = useNavigation();
- const [isLoading, setIsLoading] = useState<boolean>(true);
- const [sites, setSites] = useState<Site[]>([]);
-
- useEffect(() => {
- (async () => {
- try {
- await authorizeSite(false);
- const sites = await fetchSites();
- setSites(sites);
- setIsLoading(false);
- } catch (error) {
- console.error(error);
- setIsLoading(false);
- showToast({ style: Toast.Style.Failure, title: String(error) });
- }
- })();
- }, []);
+
+ const {isLoading, data: sites = []} = usePromise(async () => {
+ await authorizeSite(false);
+ const sites = await fetchSites();
+ return sites;
+ })
Greptile
greptile
logic: Missing error handling - usePromise should include failureToastOptions for consistent error reporting across the extension
suggested fix
const {isLoading, data: sites = []} = usePromise(async () => {
await authorizeSite(false);
const sites = await fetchSites();
return sites;
+ }, {
+ failureToastOptions: {
+ title: "Failed to load Confluence sites"
+ }
})
diff block
+import { showHUD } from "@raycast/api";
+import { triggerAllWorkFlowsCLI } from "./utils/n8n-cli-utils";
+import { appInstalled } from "./utils/common-utils";
+import { appNotInstallAlertDialog } from "./hooks/hooks";
+
+export default async () => {
+ if (!appInstalled()) {
+ await appNotInstallAlertDialog();
+ return;
+ }
+ await showHUD("Activating all workflows...");
+ const result = await triggerAllWorkFlowsCLI(true);
+ await showHUD(result);
Greptile
greptile
Missing error handling for triggerAllWorkFlowsCLI call which could fail
suggested fix
+ try {
const result = await triggerAllWorkFlowsCLI(true);
await showHUD(result);
+ } catch (error) {
+ await showHUD("Failed to activate workflows");
}
diff block
+from zoneinfo import ZoneInfo
+from posthog.hogql import ast
+from posthog.hogql.modifiers import create_default_modifiers_for_team
+from posthog.hogql.parser import parse_expr
+from posthog.hogql.property import property_to_expr
+from posthog.hogql.query import execute_hogql_query
+from posthog.hogql_queries.experiments import CONTROL_VARIANT_KEY
+from posthog.hogql_queries.experiments.trends_statistics import (
+ are_results_significant,
+ calculate_credible_intervals,
+ calculate_probabilities,
+)
+from posthog.hogql_queries.experiments.trends_statistics_v2_count import (
+ are_results_significant_v2_count,
+ calculate_credible_intervals_v2_count,
+ calculate_probabilities_v2_count,
+)
+from posthog.hogql_queries.experiments.trends_statistics_v2_continuous import (
+ are_results_significant_v2_continuous,
+ calculate_credible_intervals_v2_continuous,
+ calculate_probabilities_v2_continuous,
+)
+from posthog.hogql_queries.experiments.funnels_statistics_v2 import (
+ calculate_probabilities_v2 as calculate_probabilities_v2_funnel,
+ are_results_significant_v2 as are_results_significant_v2_funnel,
+ calculate_credible_intervals_v2 as calculate_credible_intervals_v2_funnel,
+)
+from posthog.hogql_queries.query_runner import QueryRunner
+from posthog.hogql_queries.utils.query_date_range import QueryDateRange
+from posthog.models.experiment import Experiment
+from rest_framework.exceptions import ValidationError
+from posthog.schema import (
+ CachedExperimentQueryResponse,
+ ExperimentDataWarehouseMetricConfig,
+ ExperimentEventMetricConfig,
+ ExperimentMetricType,
+ ExperimentQueryResponse,
+ ExperimentSignificanceCode,
+ ExperimentQuery,
+ ExperimentVariantFunnelsBaseStats,
+ ExperimentVariantTrendsBaseStats,
+ DateRange,
+ IntervalType,
+)
+from typing import Optional, cast
+from datetime import datetime, timedelta, UTC
+
+
+class ExperimentQueryRunner(QueryRunner):
+ query: ExperimentQuery
+ response: ExperimentQueryResponse
+ cached_response: CachedExperimentQueryResponse
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ if not self.query.experiment_id:
+ raise ValidationError("experiment_id is required")
+
+ self.experiment = Experiment.objects.get(id=self.query.experiment_id)
+ self.feature_flag = self.experiment.feature_flag
Greptile
greptile
logic: missing error handling for Experiment.objects.get() which could raise DoesNotExist
suggested fix
+ try:
self.experiment = Experiment.objects.get(id=self.query.experiment_id)
self.feature_flag = self.experiment.feature_flag
+ except Experiment.DoesNotExist:
+ raise ValidationError(f"Experiment with id {self.query.experiment_id} does not exist")
diff block
fs::remove_dir_all(&dist_path).context("fs::remove_dir_all")?;
}
- // Build manager dependencies (required for building the manager itself)
- let output = tokio::process::Command::new("yarn")
- .arg("install")
- .arg("--immutable")
- .current_dir(&manager_path)
- .output()
- .await?;
- println!("stdout:\n{}", String::from_utf8_lossy(&output.stdout));
- println!("stderr:\n{}", String::from_utf8_lossy(&output.stderr));
- ensure!(output.status.success(), "yarn install failed");
-
- let output = tokio::process::Command::new("yarn")
- .arg("run")
- .arg("build")
- .arg("--filter=@rivet-gg/actor-manager")
- .current_dir(&project_root)
- .output()
+
+ if std::env::var("RIVET_SKIP_BUILD_HUB").is_err() {
+ // Build manager dependencies (required for building the manager itself)
+ let output = tokio::process::Command::new("yarn")
+ .arg("install")
+ .arg("--immutable")
+ .current_dir(&manager_path)
+ .output()
+ .await?;
+ println!("stdout:\n{}", String::from_utf8_lossy(&output.stdout));
+ println!("stderr:\n{}", String::from_utf8_lossy(&output.stderr));
+ ensure!(output.status.success(), "yarn install failed");
+
+ let output = tokio::process::Command::new("yarn")
+ .arg("run")
+ .arg("build")
+ .arg("--filter=@rivet-gg/actor-manager")
+ .current_dir(&project_root)
+ .output()
+ .await?;
+ println!("stdout:\n{}", String::from_utf8_lossy(&output.stdout));
+ println!("stderr:\n{}", String::from_utf8_lossy(&output.stderr));
+ ensure!(output.status.success(), "yarn build failed");
+
+ // Build manager using Rivet build script (not using tsup/turbo because this includes custom
+ // polyfill functionality)
+ build_backend_command_raw(CommandOpts {
+ task_path: "src/tasks/build/mod.ts",
+ input: json!({
+ "projectRoot": sdk_path.join("manager"),
+ "entryPoint": sdk_path.join("manager/src/mod.ts"),
+ "outDir": dist_path.join("manager"),
+ "bundle": {
+ "minify": true,
+ "analyzeResult": false,
+ "logLevel": "debug"
+ }
+ }),
+ })
.await?;
- println!("stdout:\n{}", String::from_utf8_lossy(&output.stdout));
- println!("stderr:\n{}", String::from_utf8_lossy(&output.stderr));
- ensure!(output.status.success(), "yarn build failed");
-
- // Build manager using Rivet build script (not using tsup/turbo because this includes custom
- // polyfill functionality)
- build_backend_command_raw(CommandOpts {
- task_path: "src/tasks/build/mod.ts",
- input: json!({
- "projectRoot": sdk_path.join("manager"),
- "entryPoint": sdk_path.join("manager/src/mod.ts"),
- "outDir": dist_path.join("manager"),
- "bundle": {
- "minify": true,
- "analyzeResult": false,
- "logLevel": "debug"
- }
- }),
- })
- .await?;
+ } else {
+ fs::create_dir_all(&dist_path).context("fs::create_dir_all");
Greptile
greptile
logic: Missing error handling - Result from create_dir_all is ignored. Add ? operator to propagate error.
suggested fix
+ fs::create_dir_all(&dist_path).context("fs::create_dir_all")?;
diff block
+import { resolve } from "path";
+import { homedir } from "os";
+import { showToast, Toast, open, getApplications, popToRoot } from "@raycast/api";
+import { useState, useEffect } from "react";
+import path from "path";
+import fs from "fs";
+import TasksList from "./components/TasksList";
+
+export default function Command() {
+ const [evernoteDB, setEvernoteDB] = useState<string | null>(null);
+ const knownEvernoteDirLocations = [
+ resolve(
+ homedir(),
+ "Library/Containers/com.evernote.Evernote/Data/Library/Application Support/Evernote/conduit-storage/https%3A%2F%2Fwww.evernote.com",
+ ),
+ resolve(homedir(), "Library/Application Support/Evernote/conduit-storage/https%3A%2F%2Fwww.evernote.com"),
+ ];
+
+ useEffect(() => {
+ getApplications().then(async (applications) => {
+ const isEvernoteInstalled = applications.find(({ bundleId }) => bundleId === "com.evernote.Evernote");
+ if (!isEvernoteInstalled) {
+ await popToRoot();
+ await showToast({
+ style: Toast.Style.Failure,
+ title: "Evernote client is not installed.",
+ message: "Download",
+ primaryAction: {
+ title: "Go to https://evernote.com/download",
+ onAction: (toast) => {
+ open("https://evernote.com/download");
+ toast.hide();
+ },
+ },
+ });
+ return;
+ }
+ let baseDir: string | null = null;
+ if (evernoteDB) {
+ return;
+ }
+ for (const directory of knownEvernoteDirLocations) {
+ if (fs.existsSync(directory)) {
+ baseDir = directory;
+ break;
+ }
+ }
+ if (!baseDir || !fs.existsSync(baseDir)) {
+ await popToRoot();
+ await showToast({
+ style: Toast.Style.Failure,
+ title: "Cannot find Evernote database.",
+ message:
+ "The database should be in ~/Library/Application Support/Evernote/conduit-storage/https%3A%2F%2Fwww.evernote.com, but can be somewhere else.",
+ });
+ return;
+ }
+ const files = fs.readdirSync(baseDir);
+ console.log(files);
+ const dbFile = files.find((file) => file.endsWith("+RemoteGraph.sql"));
+ if (dbFile) {
+ setEvernoteDB(path.join(baseDir, dbFile));
+ }
Greptile
greptile
logic: missing error handling if no database file is found - should show toast error
diff block
+import { getPreferenceValues } from "@raycast/api";
+import fetch from "node-fetch";
+import { BASE_URL } from "../utils/constants";
+
+export default async function () {
+ const preferences = getPreferenceValues<{
+ tessieApiKey: string;
+ VIN: string;
+ }>();
+
+ const API_KEY = preferences.tessieApiKey;
+ const VIN = preferences.VIN;
+
+ const response = await fetch(`${BASE_URL}/${VIN}/state`, {
+ headers: {
+ Authorization: `Bearer ${API_KEY}`,
+ },
+ });
Greptile
greptile
logic: Missing error handling for the fetch request. If the API is unavailable or returns an error status, this will throw an unhandled exception.
diff block
+"use client";
+
+import { z } from "zod";
+
+import { zodResolver } from "@hookform/resolvers/zod";
+import { useForm } from "react-hook-form";
+
+import { Button } from "@/components/ui/button";
+import {
+ Form,
+ FormControl,
+ FormField,
+ FormItem,
+ FormLabel,
+ FormMessage,
+} from "@/components/ui/form";
+import { Input } from "@/components/ui/input";
+import {
+ registerJunoDomain,
+ registerJunoSenderAddress,
+ setupJunoEmail,
+} from "@/lib/sdkUtils";
+
+const sendgridFormSchema = z.object({
+ apiKey: z.string().regex(/^SG\./, "SendGrid API key must start with 'SG.'"),
+});
+
+const domainFormSchema = z.object({
+ domain: z.string(),
+ subdomain: z.string(),
+});
+
+const registerSenderAddressFormSchema = z.object({
+ email: z.string().email("Please enter a valid email address"),
+ name: z.string().min(1, "Name is required"),
+ replyTo: z.string().email("Please enter a valid email address").optional(),
+ nickname: z.string().min(1, "Nickname is required"),
+ address: z.string().min(1, "Address is required"),
+ city: z.string().min(1, "City is required"),
+ state: z.string().min(1, "State is required"),
+ zip: z.string().regex(/^\d{5}(-\d{4})?$/, "Please enter a valid ZIP code"),
+ country: z.string().min(1, "Country is required"),
+});
+
const EmailServicePage = () => {
- return <></>;
+ const sendgridForm = useForm<z.infer<typeof sendgridFormSchema>>({
+ resolver: zodResolver(sendgridFormSchema),
+ defaultValues: { apiKey: "" },
+ });
+ const domainForm = useForm<z.infer<typeof domainFormSchema>>({
+ resolver: zodResolver(domainFormSchema),
+ defaultValues: { domain: "", subdomain: "" },
+ });
+ const registerSenderAddressForm = useForm<
+ z.infer<typeof registerSenderAddressFormSchema>
+ >({
+ resolver: zodResolver(registerSenderAddressFormSchema),
+ defaultValues: {
+ email: "",
+ name: "",
+ replyTo: "",
+ nickname: "",
+ address: "",
+ city: "",
+ state: "",
+ zip: "",
+ country: "",
+ },
+ });
+
+ function onSubmitSendgridForm(values: z.infer<typeof sendgridFormSchema>) {
+ setupJunoEmail(values.apiKey);
+ }
Greptile
greptile
logic: Missing error handling and user feedback for API response. Need to handle the returned success/message values from setupJunoEmail
suggested fix
+ async function onSubmitSendgridForm(values: z.infer<typeof sendgridFormSchema>) {
+ try {
+ await setupJunoEmail(values.apiKey);
+ // TODO: Add success notification
+ } catch (error) {
+ // TODO: Add error notification
+ console.error(error);
}
}
diff block
+import { ActionPanel, Detail, List, Action } from "@raycast/api"
+import { useEffect, useState } from "react"
+import fetch from "node-fetch"
+
+type ItemType = {
+ dictid: number
+ dict: string
+ expl: string
+}
+
+export default function Command() {
+ const [items, setItems] = useState<ItemType[]>([])
+
+ useEffect(() => {
+ fetch("https://deerpark.app/api/v1/dict/lookup/如來藏")
+ .then((response) => response.json())
+ .then((json) =>
+ setItems(
+ json.data.map((item: ItemType) => {
+ return { ...item, expl: item.expl.replace(/<\/?p>/g, "") }
+ })
+ )
+ )
+ }, [])
Greptile
greptile
logic: Missing error handling for API call and loading state management. Could fail silently if API is down.
suggested fix
useEffect(() => {
+ const fetchData = async () => {
+ try {
+ const response = await fetch("https://deerpark.app/api/v1/dict/lookup/如來藏");
+ const json = await response.json();
setItems(
json.data.map((item: ItemType) => {
return { ...item, expl: item.expl.replace(/<\/?p>/g, "") }
})
+ );
+ } catch (error) {
+ showFailureToast("Failed to fetch dictionary data", error);
}
+ };
+ fetchData();
}, [])
diff block
+import { homedir } from "os";
+import { resolve } from "path";
+import { getPersistenceInfo } from "../api/get-persistence-info";
+
+export const getDbPath = async () => {
+ const persistenceInfo = await getPersistenceInfo();
+ return resolve(homedir(), "Library/Mail", persistenceInfo.LastUsedVersionDirectoryName, "MailData/Envelope Index");
+};
Greptile
greptile
logic: Missing error handling if getPersistenceInfo() fails or returns unexpected data. Should wrap in try/catch and validate that persistenceInfo.LastUsedVersionDirectoryName exists.
suggested fix
export const getDbPath = async () => {
+ try {
const persistenceInfo = await getPersistenceInfo();
+ if (!persistenceInfo?.LastUsedVersionDirectoryName) {
+ throw new Error("Could not determine Mail version directory");
+ }
return resolve(homedir(), "Library/Mail", persistenceInfo.LastUsedVersionDirectoryName, "MailData/Envelope Index");
+ } catch (error) {
+ throw new Error(`Failed to get Mail database path: ${error.message}`);
+ }
};
diff block
+import { useForm } from "@raycast/utils";
+import { useState } from "react";
+import { compare } from "../../utils/pixelmatch";
+import { fileValidation } from "./file-validation";
+import { Jimp } from "jimp";
+
+interface ImagesFormValues {
+ actual: string[];
+ expected: string[];
+}
+
+export const useImagesForm = () => {
+ const [markdown, setMarkdown] = useState<string>("");
+
+ const createMarkdown = async (
+ diffImageSource: Promise<{
+ diffBuffer: Buffer;
+ width: number;
+ height: number;
+ }>,
+ ) => {
+ const { width, height, diffBuffer } = await diffImageSource;
+ const diffJimpImage = await new Jimp({ data: diffBuffer, width, height });
+ const base64Image = await diffJimpImage.getBase64("image/png");
+ setMarkdown(`![](${base64Image})`);
+ };
+
+ const { handleSubmit, itemProps } = useForm<ImagesFormValues>({
+ onSubmit(values) {
+ const diffImageSource = compare(values.actual[0], values.expected[0]);
+ createMarkdown(diffImageSource);
+ },
Greptile
greptile
logic: Missing error handling for image comparison. If compare() or createMarkdown() fails, there's no error handling or feedback to the user.
diff block
+import { PostHogPersistedProperty } from '../../../posthog-core/src'
+import { useCallback, useEffect, useState } from 'react'
+import { usePostHog } from '../hooks/usePostHog'
+
+type SurveyStorage = {
+ seenSurveys: string[]
+ setSeenSurvey: (surveyId: string) => void
+ lastSeenSurveyDate: Date | undefined
+ setLastSeenSurveyDate: (date: Date) => void
+}
+
+export function useSurveyStorage(): SurveyStorage {
+ const posthogStorage = usePostHog()
+ const [lastSeenSurveyDate, setLastSeenSurveyDate] = useState<Date | undefined>(undefined)
+ const [seenSurveys, setSeenSurveys] = useState<string[]>([])
+
+ useEffect(() => {
+ posthogStorage.ready().then(() => {
+ const lastSeenSurveyDate = posthogStorage.getPersistedProperty(PostHogPersistedProperty.SurveyLastSeenDate)
+ if (typeof lastSeenSurveyDate === 'string') {
+ setLastSeenSurveyDate(new Date(lastSeenSurveyDate))
+ }
+
+ const serialisedSeenSurveys = posthogStorage.getPersistedProperty(PostHogPersistedProperty.SurveysSeen)
+ if (typeof serialisedSeenSurveys === 'string') {
+ const parsedSeenSurveys: unknown = JSON.parse(serialisedSeenSurveys)
+ if (Array.isArray(parsedSeenSurveys) && typeof parsedSeenSurveys[0] === 'string') {
+ setSeenSurveys(parsedSeenSurveys)
+ }
+ }
+ })
+ }, [posthogStorage])
Greptile
greptile
logic: Missing error handling for the promise chain. Also needs cleanup to prevent state updates after unmount.
diff block
+import { Cache } from '@raycast/api';
+import axios from 'axios';
+import * as cheerio from 'cheerio';
+import { CardSlot, ClassName, Deck, Rarity } from '../types/types';
+import { CacheEntry } from './utils';
+
+const CACHE_DURATION_IN_MS = 3600 * 1_000;
+export const hsguru_BEST_DECKS_URL = (format: number) => `https://www.hsguru.com/decks/?format=${format}`;
+
+const cache = new Cache();
+
+export const hsguruBestDecksWithFiltersUrl = (className: ClassName, format: number, minGames?: number) => {
+ // 将类名转换为API所需的大写格式
+ const classNameForApi = className.toString().replace(/\s+/g, '').toUpperCase();
+
+ let url = `https://www.hsguru.com/decks/?format=${format}&player_class=${classNameForApi}`;
+ if (minGames) {
+ url += `&min_games=${minGames}`;
+ }
+ return url;
+};
+
+export const gethsguruBestDecks = async (format: number = 1) => {
+ const cacheKey = `all_classes_${format}`;
+
+ const cachedDecks = getFromCache(cacheKey);
+
+ if (cachedDecks) {
+ return Promise.resolve(cachedDecks);
+ }
+
+ const decks = await fetchDecks(hsguru_BEST_DECKS_URL(format));
+ saveToCache(cacheKey, decks);
+
+ return decks;
+};
+
+export const gethsguruBestDecksByClass = async (className: ClassName, format: number = 1, minGames?: number) => {
+ const cacheKey = `${className.toString()}_${format}_${minGames || ''}`;
+
+ const cachedDecks = getFromCache(cacheKey);
+
+ if (cachedDecks) {
+ return Promise.resolve(cachedDecks);
+ }
+
+ const decks = await fetchDecks(hsguruBestDecksWithFiltersUrl(className, format, minGames));
+ saveToCache(cacheKey, decks);
+
+ return decks;
+};
+
+const getFromCache = (cacheKey: string) => {
+ const cachedResponse = cache.get(cacheKey);
+ if (cachedResponse) {
+ const parsed: CacheEntry = JSON.parse(cachedResponse);
+
+ const elapsed = Date.now() - parsed.timestamp;
+
+ if (elapsed <= CACHE_DURATION_IN_MS) {
+ return parsed.decks;
+ }
+ }
+
+ return null;
+};
+
+const saveToCache = (cacheKey: string, decks: Deck[]) => {
+ cache.set(cacheKey, JSON.stringify({ timestamp: Date.now(), decks: decks }));
+};
+
+export const fetchDecks = async (url: string) => {
+ const response = await axios.get(url);
+ const $ = cheerio.load(response.data);
Greptile
greptile
logic: Missing error handling for failed HTTP requests. Should wrap in try/catch and handle network errors gracefully.
suggested fix
export const fetchDecks = async (url: string) => {
+ try {
const response = await axios.get(url);
const $ = cheerio.load(response.data);
diff block
+import { knowledgeGraphManager } from "../knowledge-graph-manager";
+
+/**
+ * Search the knowledge graph by matching a query to entity names, types, or observations.
+ */
+export default async function (input: {
+ /**
+ * A query string used to search entity names, types, or observations.
+ */
+ query: string;
+}) {
+ return knowledgeGraphManager.searchNodes(input.query);
+}
Greptile
greptile
logic: Missing error handling for searchNodes call. Should wrap in try/catch to handle potential failures gracefully.
diff block
+import { Jimp } from "jimp";
+import pixelmatch from "pixelmatch";
+
+export const compare = async (
+ actual: string,
+ expected: string,
+): Promise<{ diffBuffer: Buffer; width: number; height: number }> => {
+ const actualImage = await Jimp.read(actual);
+ const expectedImage = await Jimp.read(expected);
Greptile
greptile
logic: Missing error handling for image loading failures. If either image can't be read, this will throw an unhandled exception.
diff block
}
} catch (error) {
didError = true;
- console.log('\n\n' + error.message);
- console.log(file);
}
});
Greptile
greptile
logic: Missing error handling - script should exit with non-zero status when errors occur to fail CI/CD pipelines
suggested fix
+});
+if (didError) {
+ process.exit(1);
+}
diff block
+#!/bin/bash
+
+# Integration Tests for Raycast Alarms Extension
+# These tests verify the core functionality of the alarm system
+
+# Load test utilities
+source "$(dirname "$(dirname "$0")")/test-utils.sh"
+
+# Set up a patched version of the manage-crontab.sh script for testing
+setup_patched_script() {
+ # Make a copy of the original script
+ cp "$TEST_CONFIG_DIR/scripts/manage-crontab.sh" "$TEST_CONFIG_DIR/scripts/manage-crontab.sh.orig"
+
+ # Patch the script to use our mock crontab
+ sed "s|crontab -l|\"$(dirname "$(dirname "$0")")/mock-crontab.sh\" -l|g" \
+ "$TEST_CONFIG_DIR/scripts/manage-crontab.sh.orig" > "$TEST_CONFIG_DIR/scripts/manage-crontab.sh.tmp"
+
+ sed "s|crontab \"|\"$(dirname "$(dirname "$0")")/mock-crontab.sh\" \"|g" \
+ "$TEST_CONFIG_DIR/scripts/manage-crontab.sh.tmp" > "$TEST_CONFIG_DIR/scripts/manage-crontab.sh"
+
+ # Make it executable
+ chmod +x "$TEST_CONFIG_DIR/scripts/manage-crontab.sh"
+
+ rm -f "$TEST_CONFIG_DIR/scripts/manage-crontab.sh.tmp"
+}
+
+# Test: Adding an alarm
+test_add_alarm() {
+ local test_id="test_alarm_$(date +%s)"
+ local test_title="Test Alarm"
+ local test_hours="10"
+ local test_minutes="30"
+ local test_seconds="0"
+ local test_sound="/System/Library/Sounds/Submarine.aiff"
+
+ # Add the alarm
+ run_manage_crontab add "$test_id" "$test_title" "$test_hours" "$test_minutes" "$test_seconds" "$test_sound"
+
+ # Check if the alarm was added to the data file
+ if file_contains "$TEST_CONFIG_DIR/alarms.data" "$test_id"; then
+ log_success "Alarm was added to data file"
+ else
+ log_failure "Alarm was not added to data file"
+ return 1
+ fi
+
+ # Check the format of the entry (should be pipe-delimited)
+ local pattern="$test_id|$test_title|10:30|$test_sound"
+ if file_contains "$TEST_CONFIG_DIR/alarms.data" "$test_id"; then
+ log_success "Alarm entry has correct format"
+ else
+ log_failure "Alarm entry format is incorrect"
+ return 1
+ fi
+
+ return 0
+}
+
+# Test: Listing alarms
+test_list_alarms() {
+ # List alarms and capture output
+ local list_output=$(run_manage_crontab list)
+
+ # Check if the output is valid JSON
+ echo "$list_output" | jq '.' > /dev/null 2>&1
+ if [ $? -eq 0 ]; then
+ log_success "List output is valid JSON"
+ else
+ log_failure "List output is not valid JSON"
+ echo "Output was: $list_output"
+ return 1
+ fi
+
+ # Check if our test alarm is in the list
+ if echo "$list_output" | grep -q "test_alarm"; then
+ log_success "Test alarm found in alarm list"
+ else
+ log_failure "Test alarm not found in alarm list"
+ echo "Output was: $list_output"
+ return 1
+ fi
+
+ return 0
+}
+
+# Test: Removing an alarm
+test_remove_alarm() {
+ # Get the ID of our test alarm
+ local test_id=$(grep "test_alarm" "$TEST_CONFIG_DIR/alarms.data" | cut -d'|' -f1)
+
+ if [ -z "$test_id" ]; then
+ log_failure "Could not find test alarm to remove"
+ return 1
+ fi
+
+ # Count alarms before removal
+ local before_count=$(count_lines "$TEST_CONFIG_DIR/alarms.data")
+
+ # Remove the alarm
+ run_manage_crontab remove "$test_id"
+
+ # Count alarms after removal
+ local after_count=$(count_lines "$TEST_CONFIG_DIR/alarms.data")
+
+ # Check if the count decreased
+ if [ "$after_count" -lt "$before_count" ]; then
+ log_success "Alarm count decreased after removal"
+ else
+ log_failure "Alarm count did not decrease after removal"
+ return 1
+ fi
+
+ # Check that the alarm ID is no longer in the file
+ if ! file_contains "$TEST_CONFIG_DIR/alarms.data" "$test_id"; then
+ log_success "Alarm was removed from data file"
+ else
+ log_failure "Alarm still exists in data file after removal"
+ return 1
+ fi
+
+ return 0
+}
+
+# Test: Adding and removing multiple alarms
+test_multiple_alarms() {
+ # Add several test alarms
+ for i in {1..5}; do
+ local test_id="multi_test_alarm_$i"
+ local test_title="Test Alarm $i"
+ local test_hours=$((10 + i))
+ local test_minutes=$((i * 10))
+
+ run_manage_crontab add "$test_id" "$test_title" "$test_hours" "$test_minutes" "0" "/System/Library/Sounds/Submarine.aiff" > /dev/null
+ done
+
+ # List alarms
+ local list_output=$(run_manage_crontab list)
+
+ # Check if we have all 5 alarms
+ local alarm_count=$(echo "$list_output" | grep -o "multi_test_alarm" | wc -l)
+ if [ "$alarm_count" -eq 5 ]; then
+ log_success "All 5 test alarms were added successfully"
+ else
+ log_failure "Expected 5 test alarms, but found $alarm_count"
+ return 1
+ fi
+
+ # Remove the alarms one by one
+ for i in {1..5}; do
+ local test_id="multi_test_alarm_$i"
+ run_manage_crontab remove "$test_id" > /dev/null
+
+ # Check if it was removed
+ if ! file_contains "$TEST_CONFIG_DIR/alarms.data" "$test_id"; then
+ log_success "Alarm $test_id was removed successfully"
+ else
+ log_failure "Alarm $test_id was not removed"
+ return 1
+ fi
+ done
+
+ return 0
+}
+
+# Test: Scheduled time formatting
+test_time_formatting() {
+ # Test single-digit hours and minutes
+ local test_id="format_test_alarm"
+
+ # Test with hour=9, minute=5
+ run_manage_crontab add "$test_id" "Format Test" "9" "5" "0" "/System/Library/Sounds/Submarine.aiff" > /dev/null
+
+ # Check if the formatted time has leading zeros (should be 09:05)
+ if file_contains "$TEST_CONFIG_DIR/alarms.data" "$test_id|Format Test|09:05"; then
+ log_success "Time was correctly formatted with leading zeros"
+ else
+ log_failure "Time formatting failed for single-digit values"
+ cat "$TEST_CONFIG_DIR/alarms.data"
+ return 1
+ fi
+
+ # Clean up
+ run_manage_crontab remove "$test_id" > /dev/null
+
+ return 0
+}
+
+# Main test runner
+run_tests() {
+ echo -e "\n${BOLD}${BLUE}===================================${NC}"
+ echo -e "${BOLD}${BLUE} Raycast Alarms Integration Tests ${NC}"
+ echo -e "${BOLD}${BLUE}===================================${NC}\n"
+
+ local tests_passed=0
+ local tests_failed=0
+
+ # Setup test environment
+ setup_test_env
+ setup_patched_script
Greptile
greptile
logic: Missing error handling if setup_test_env or setup_patched_script fails
diff block
+#!/bin/bash
+
+# Integration Tests for Raycast Alarms Trigger Functionality
+# These tests verify the alarm triggering mechanism
+
+# Load test utilities
+source "$(dirname "$(dirname "$0")")/test-utils.sh"
+
+# Test: Simulation of alarm triggering
+test_alarm_trigger() {
+ local test_id="trigger_test_alarm_$(date +%s)"
+ local test_title="Trigger Test Alarm"
+ local test_sound="/System/Library/Sounds/Submarine.aiff"
+
+ # Create an active directory
+ mkdir -p "$TEST_CONFIG_DIR/active"
+
+ # Run the trigger script with nohup to avoid actual sound playing and dialogs
+ # Just test that the script executes without errors
+ nohup "$TEST_CONFIG_DIR/scripts/trigger-alarm.sh" "$test_id" "$test_title" "$test_sound" "0" > "$TEST_CONFIG_DIR/nohup.out" 2>&1 &
+
+ # Store process ID
+ local pid=$!
+
+ # Wait a moment for it to initialize
+ sleep 1
+
+ # Check if the active file was created
+ if [ -f "$TEST_CONFIG_DIR/active/$test_id" ]; then
+ log_success "Active alarm file was created"
+ else
+ log_failure "Active alarm file was not created"
+ return 1
+ fi
+
+ # Check if the loop control file exists
+ if [ -f "$TEST_CONFIG_DIR/active/${test_id}_loop" ]; then
+ log_success "Loop control file was created"
+ else
+ log_failure "Loop control file was not created"
+ return 1
+ fi
+
+ # Kill the test process
+ if kill -0 $pid > /dev/null 2>&1; then
+ kill $pid
+ log_success "Successfully terminated test alarm process"
+ fi
+
+ # Clean up
+ rm -f "$TEST_CONFIG_DIR/active/$test_id"
+ rm -f "$TEST_CONFIG_DIR/active/${test_id}_loop"
+
+ return 0
+}
+
+# Test: Verify automatic cleanup when alarm stopped
+test_alarm_cleanup() {
+ local test_id="cleanup_test_alarm_$(date +%s)"
+
+ # Create mock active files
+ mkdir -p "$TEST_CONFIG_DIR/active"
+ echo "12345" > "$TEST_CONFIG_DIR/active/$test_id"
+ echo "1" > "$TEST_CONFIG_DIR/active/${test_id}_loop"
+
+ # Create a mock crontab entry
+ echo "30 10 * * * $TEST_CONFIG_DIR/scripts/trigger-alarm.sh $test_id \"Test Title\" /path/to/sound.aiff 0" > "/tmp/raycast-alarms-test/mock-crontab"
+
+ # Run the cleanup part of the trigger script (simulated)
+ # We'll create a temporary script to simulate the cleanup
+ cat << 'EOF' > "$TEST_CONFIG_DIR/temp_cleanup.sh"
+#!/bin/bash
+alarm_id="$1"
+active_file="$TEST_CONFIG_DIR/active/$alarm_id"
+loop_control_file="$TEST_CONFIG_DIR/active/${alarm_id}_loop"
+
+# Remove the loop control file
+rm -f "$loop_control_file"
+
+# Remove the active file
+rm -f "$active_file"
+
+# Remove from crontab
+"$TEST_CONFIG_DIR/scripts/manage-crontab.sh" remove "$alarm_id"
+EOF
+
+ chmod +x "$TEST_CONFIG_DIR/temp_cleanup.sh"
+
+ # Run the cleanup script
+ "$TEST_CONFIG_DIR/temp_cleanup.sh" "$test_id"
+
+ # Check if files were removed
+ if [ ! -f "$TEST_CONFIG_DIR/active/$test_id" ] && [ ! -f "$TEST_CONFIG_DIR/active/${test_id}_loop" ]; then
+ log_success "Alarm files were properly cleaned up"
+ else
+ log_failure "Alarm files were not cleaned up"
+ return 1
+ fi
+
+ # Check if crontab entry was removed
+ if ! grep -q "$test_id" "/tmp/raycast-alarms-test/mock-crontab"; then
+ log_success "Crontab entry was removed"
+ else
+ log_failure "Crontab entry was not removed"
+ return 1
+ fi
+
+ return 0
+}
+
+# Main test runner
+run_trigger_tests() {
+ echo -e "\n${BOLD}${BLUE}========================================${NC}"
+ echo -e "${BOLD}${BLUE} Raycast Alarms Trigger Function Tests ${NC}"
+ echo -e "${BOLD}${BLUE}========================================${NC}\n"
+
+ local tests_passed=0
+ local tests_failed=0
+
+ # Setup test environment
+ setup_test_env
+
+ # Patch the manage-crontab script
+ setup_patched_script
Greptile
greptile
logic: Missing error handling if setup_test_env or setup_patched_script fails
suggested fix
# Setup test environment
+ if ! setup_test_env; then
+ log_failure "Failed to setup test environment"
return 1
fi
# Patch the manage-crontab script
+ if ! setup_patched_script; then
+ log_failure "Failed to patch manage-crontab script"
return 1
fi
diff block
+#!/bin/sh
+
+# Raycast Alarms Extension - Installation Script
+# Crafted with ♥ for developers who value precision and reliability
+
+# ANSI color codes for better visual hierarchy
+GREEN='\033[0;32m'
+BLUE='\033[0;34m'
+YELLOW='\033[1;33m'
+RED='\033[0;31m'
+NC='\033[0m' # No Color
+BOLD='\033[1m'
+
+# Helper function for printing beautifully formatted messages
+print_step() {
+ echo "${BOLD}${BLUE}[⚙️ Setup]${NC} $1"
+}
+
+print_success() {
+ echo "${GREEN}[✓ Success]${NC} $1"
+}
+
+print_info() {
+ echo "${BLUE}[ℹ️ Info]${NC} $1"
+}
+
+print_warning() {
+ echo "${YELLOW}[⚠️ Warning]${NC} $1"
+}
+
+print_error() {
+ echo "${RED}[❌ Error]${NC} $1"
+}
+
+# Print welcome banner
+echo "\n${BOLD}${BLUE}===============================================${NC}"
+echo "${BOLD}${BLUE} Raycast Alarms Extension Installer ${NC}"
+echo "${BOLD}${BLUE}===============================================${NC}"
+echo "${BLUE}Building your perfect alarm system, one script at a time${NC}\n"
+
+# Find script directory
+SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
+print_info "Installation source: ${BOLD}$SCRIPT_DIR${NC}"
+
+# Setup the config directory
+CONFIG_DIR="$HOME/.raycast-alarms"
+print_step "Creating workspace at ${BOLD}$CONFIG_DIR${NC}"
+
+# Create required directories
+mkdir -p "$CONFIG_DIR/scripts"
+mkdir -p "$CONFIG_DIR/logs"
+mkdir -p "$CONFIG_DIR/active"
+print_success "Directory structure prepared"
+
+# Verify source files exist
+TRIGGER_SCRIPT="$SCRIPT_DIR/trigger-alarm.sh"
+CRONTAB_SCRIPT="$SCRIPT_DIR/manage-crontab.sh"
+APPLESCRIPT="$SCRIPT_DIR/show-alarm-popup.applescript"
+
+print_step "Verifying source files..."
+
+missing_files=0
+if [ ! -f "$TRIGGER_SCRIPT" ]; then
+ print_error "Missing trigger script: $TRIGGER_SCRIPT"
+ missing_files=$((missing_files + 1))
+fi
+
+if [ ! -f "$CRONTAB_SCRIPT" ]; then
+ print_error "Missing crontab management script: $CRONTAB_SCRIPT"
+ missing_files=$((missing_files + 1))
+fi
+
+if [ ! -f "$APPLESCRIPT" ]; then
+ print_error "Missing notification dialog script: $APPLESCRIPT"
+ missing_files=$((missing_files + 1))
+fi
+
+if [ $missing_files -gt 0 ]; then
+ print_error "Installation aborted due to missing source files"
+ exit 1
+fi
+
+print_success "All source files verified"
+
+# Copy scripts to config directory and make them executable
+print_step "Deploying scripts to your system..."
+
+cp "$TRIGGER_SCRIPT" "$CONFIG_DIR/scripts/"
+cp "$CRONTAB_SCRIPT" "$CONFIG_DIR/scripts/"
+cp "$APPLESCRIPT" "$CONFIG_DIR/scripts/"
Greptile
greptile
logic: Missing error handling for failed file copy operations
suggested fix
+if ! cp "$TRIGGER_SCRIPT" "$CONFIG_DIR/scripts/" || \
+ ! cp "$CRONTAB_SCRIPT" "$CONFIG_DIR/scripts/" || \
+ ! cp "$APPLESCRIPT" "$CONFIG_DIR/scripts/"; then
+ print_error "Failed to copy required scripts"
exit 1
fi
diff block
+import React from "react";
+import { List, ActionPanel, Action, Icon, showToast, Toast } from "@raycast/api";
+import { useCallback, useState, useEffect } from "react";
+import { spawn } from "child_process";
+import os from "os";
+import { open } from "@raycast/api";
+import fs from "fs";
+import { initializeExtension } from "./utils/initialize";
+
+// Path to the manage-crontab.sh script
+const SCRIPT_PATH = `${os.homedir()}/.raycast-alarms/scripts/manage-crontab.sh`;
+
+interface AlarmInfo {
+ id: string;
+ title: string;
+ time: string;
+ sound: string;
+ cronExpression?: string;
+ name?: string; // For compatibility
+}
+
+// Format time to show only hours:minutes (not seconds)
+const formatTime = (timeString: string): string => {
+ // If time contains seconds (HH:MM:SS), remove the seconds part
+ if (timeString && timeString.includes(":")) {
+ const parts = timeString.split(":");
+ if (parts.length >= 2) {
+ return `${parts[0]}:${parts[1]}`;
+ }
+ }
+ return timeString;
+};
+
+// Execute command function
+const execCommand = async (
+ command: string,
+ args: string[]
+): Promise<{ stdout: string; stderr: string; code: number }> => {
+ return new Promise((resolve) => {
Greptile
greptile
logic: Promise is missing error handling for spawn failures - should include a catch block
suggested fix
+ return new Promise((resolve, reject) => {
diff block
+#!/bin/bash
+
+# Integration Tests for Raycast Alarms Trigger Functionality
+# These tests verify the alarm triggering mechanism
+
+# Load test utilities
+source "$(dirname "$(dirname "$0")")/test-utils.sh"
+
+# Test: Simulation of alarm triggering
+test_alarm_trigger() {
+ local test_id="trigger_test_alarm_$(date +%s)"
+ local test_title="Trigger Test Alarm"
+ local test_sound="/System/Library/Sounds/Submarine.aiff"
+
+ # Create an active directory
+ mkdir -p "$TEST_CONFIG_DIR/active"
+
+ # Run the trigger script with nohup to avoid actual sound playing and dialogs
+ # Just test that the script executes without errors
+ nohup "$TEST_CONFIG_DIR/scripts/trigger-alarm.sh" "$test_id" "$test_title" "$test_sound" "0" > "$TEST_CONFIG_DIR/nohup.out" 2>&1 &
+
+ # Store process ID
+ local pid=$!
+
+ # Wait a moment for it to initialize
+ sleep 1
+
+ # Check if the active file was created
+ if [ -f "$TEST_CONFIG_DIR/active/$test_id" ]; then
+ log_success "Active alarm file was created"
+ else
+ log_failure "Active alarm file was not created"
+ return 1
+ fi
+
+ # Check if the loop control file exists
+ if [ -f "$TEST_CONFIG_DIR/active/${test_id}_loop" ]; then
+ log_success "Loop control file was created"
+ else
+ log_failure "Loop control file was not created"
+ return 1
+ fi
+
+ # Kill the test process
+ if kill -0 $pid > /dev/null 2>&1; then
+ kill $pid
+ log_success "Successfully terminated test alarm process"
+ fi
Greptile
greptile
logic: Missing error handling for kill command failure. Should check return code and log error if kill fails
suggested fix
# Kill the test process
if kill -0 $pid > /dev/null 2>&1; then
+ if kill $pid; then
log_success "Successfully terminated test alarm process"
else
+ log_failure "Failed to terminate test alarm process"
return 1
fi
fi
diff block
return connector
return _connector_factory
+
+
+@pytest.fixture(scope="session", autouse=True)
+def set_resource_limits() -> None:
+ # the google sdk is aggressive about using up file descriptors and
+ # macos is stingy ... these tests will fail randomly unless the descriptor limit is raised
+ RLIMIT_MINIMUM = 2048
+ soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
+ desired_soft = min(RLIMIT_MINIMUM, hard) # Pick your target here
+
+ print(f"Open file limit: soft={soft} hard={hard} soft_required={RLIMIT_MINIMUM}")
+
+ if soft < desired_soft:
+ print(f"Raising open file limit: {soft} -> {desired_soft}")
+ resource.setrlimit(resource.RLIMIT_NOFILE, (desired_soft, hard))
+
+ soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
+ print(f"New open file limit: soft={soft} hard={hard}")
+ return
Greptile
greptile
logic: Missing error handling if setrlimit fails due to insufficient permissions
diff block
+import { Form, ActionPanel, Action, showToast, Toast, useNavigation } from "@raycast/api";
+import { useForm, FormValidation, usePromise } from "@raycast/utils";
+import { getSavedItems, setSavedItems } from "../utilities/storage";
+import { EntryDirectReplace, RegexItemDirectReplace } from "../types";
+import { nanoid } from "nanoid";
+import { Fragment, PropsWithChildren, useState } from "react";
+
+export interface FormDirectReplaceProps extends PropsWithChildren {
+ initialValues: EntryDirectReplace;
+ isNew?: boolean;
+}
+
+export default function FormDirectReplace({ initialValues, isNew, children }: FormDirectReplaceProps) {
+ const { pop } = useNavigation();
+
+ const emptyRegexItem: RegexItemDirectReplace = {
+ id: nanoid(),
+ regex: "",
+ replacement: "",
+ matchCaseInsensitive: false,
+ matchGlobally: true,
+ matchMultiline: true,
+ };
+ const { data: replacementEntries, isLoading } = usePromise(getSavedItems);
+
+ const [regexItems, setRegexItems] = useState<RegexItemDirectReplace[]>(initialValues?.regexItems || [emptyRegexItem]);
+
+ function addRegexItem() {
+ setRegexItems((prev) => [...prev, emptyRegexItem]);
+ }
+
+ function updateRegexItem(index: number, updatedItem: RegexItemDirectReplace) {
+ setRegexItems((prev) => prev.map((item, idx) => (idx === index ? updatedItem : item)));
+ }
+
+ const { handleSubmit, itemProps } = useForm<EntryDirectReplace>({
+ initialValues,
+ onSubmit(values) {
+ if (isNew || !replacementEntries || replacementEntries.length < 1) {
+ replacementEntries?.push({
+ ...values,
+ id: nanoid(),
+ type: "directReplace",
+ regexItems,
+ });
+ } else {
+ const itemIndex = replacementEntries?.findIndex((e) => e.id === initialValues.id);
Greptile
greptile
logic: Missing error handling if itemIndex is -1 (item not found).
suggested fix
const itemIndex = replacementEntries?.findIndex((e) => e.id === initialValues.id);
+ if (itemIndex === -1) {
+ showToast({
+ style: Toast.Style.Failure,
+ title: "Error",
+ message: "Could not find item to update"
});
+ return;
}
diff block
newProxyMapping.data.helicone_proxy_key = proxyKey;
if (limits.length > 0) {
- const insertLimits = await getSupabaseServer()
- .from("helicone_proxy_key_limits")
- .insert(
- limits.map((limit) => ({
- id: crypto.randomUUID(),
- helicone_proxy_key: proxyKeyId,
- timewindow_seconds: limit.timewindow_seconds,
- count: limit.count,
- cost: limit.cost,
- currency: limit.currency,
- }))
+ for (const limit of limits) {
+ await dbExecute(
+ `INSERT INTO helicone_proxy_key_limits (id, helicone_proxy_key, timewindow_seconds, count, cost, currency) VALUES ($1, $2, $3, $4, $5, $6)`,
+ [
+ crypto.randomUUID(),
+ proxyKeyId,
+ limit.timewindow_seconds,
+ limit.count,
+ limit.cost,
+ limit.currency,
+ ]
);
- if (insertLimits.error) {
- const remove = await getSupabaseServer()
- .from("helicone_proxy_keys")
- .delete()
- .eq("id", proxyKeyId);
- console.error("Failed to insert limits, removing proxy key", remove);
-
- console.error("Failed to insert limits", insertLimits.error);
- res.status(500).json({ error: insertLimits.error.message, data: null });
- return;
}
Greptile
greptile
logic: Missing error handling for limits insertion; consider checking return errors and rolling back proxy key insertion if a failure occurs.
diff block
await document.exitFullscreen()
}
},
+ showNextRecordingConfirmation: () => {
+ if (props.playlistLogic) {
+ props.playlistLogic.actions.loadNext()
+ }
+ },
+ confirmNextRecording: async () => {
+ // Mark all similar recordings as viewed
+ await Promise.all(
+ values.similarRecordings.map((recordingId: SessionRecordingType['id']) =>
+ api.recordings.update(recordingId, {
+ viewed: true,
+ })
+ )
+ )
+ actions.hideNextRecordingConfirmation()
+ if (props.playlistLogic) {
+ props.playlistLogic.actions.loadNext()
+ }
+ },
+ loadSimilarRecordings: async () => {
+ if (values.featureFlags[FEATURE_FLAGS.RECORDINGS_SIMILAR_RECORDINGS]) {
+ const response = await api.recordings.getSimilarRecordings(values.sessionRecordingId)
+ actions.loadSimilarRecordingsSuccess(response.count)
+ actions.setSimilarRecordings(response.results)
+ }
+ },
Greptile
greptile
style: Missing error handling for the API call. Consider adding try/catch to handle network failures gracefully.
diff block
+<!doctype html>
+<html>
+ <head>
+ <title>Nest Camera Stream</title>
+ <script src="https://cdn.jsdelivr.net/npm/hls.js@latest"></script>
+ <style>
+ body {
+ margin: 0;
+ padding: 0;
+ background: #000;
+ display: flex;
+ justify-content: center;
+ align-items: center;
+ height: 100vh;
+ }
+ video {
+ max-width: 100%;
+ max-height: 100vh;
+ }
+ </style>
+ </head>
+ <body>
+ <video id="video" controls autoplay muted></video>
+ <script>
+ const video = document.getElementById("video");
+ const streamUrl = new URL(window.location.href).searchParams.get("stream");
Greptile
greptile
logic: Missing error handling if streamUrl is null or invalid
suggested fix
const streamUrl = new URL(window.location.href).searchParams.get("stream");
+ if (!streamUrl) {
+ console.error('Stream URL is missing. Add ?stream=<url> to the page URL.');
+ document.body.innerHTML = '<div style="color: white; text-align: center;">Error: Stream URL is missing</div>';
}
diff block
+import { fetchDisplayModeList } from "../commands";
+
+type Input = {
+ /**
+ * The tagID of the display.
+ */
+ tagID: string;
+};
+
+/**
+ * This command allows you to get the possible resolutions of a display you have a tagID for.
+ * The resolution are presented as a text, each resolution is a line.
+ * like `0 - 800x600 60Hz 10bpc` where the first number identifies the resolution (resolution ID).
+ * the resolution itself is the second part, the refresh rate is the third part and the last part is the bit depth.
+ * Some resolutions can be marked unsafe, native, default and current.
+ * The current resolution is the one that is currently used by the screen.
+ * Present the options in the form of a markdown table.
+ */
+export default function tool(input: Input) {
+ const resolutions = fetchDisplayModeList(input.tagID);
+ return resolutions;
+}
Greptile
greptile
logic: Missing error handling for fetchDisplayModeList call. Should wrap in try/catch to handle potential failures gracefully.
suggested fix
export default function tool(input: Input) {
+  try {
    const resolutions = fetchDisplayModeList(input.tagID);
    return resolutions;
+  } catch (error) {
+    throw new Error(`Failed to fetch display resolutions: ${error.message}`);
  }
}
diff block
+import React from 'react';
+
+import { validate } from 'email-validator';
+import { useMemoizedFn } from 'ahooks';
+import { Button, Divider, Input } from 'antd';
+import { BusterShare, ShareRole, ShareAssetType } from '@/api/asset_interfaces';
+import type { ShareRequest } from '@/api/buster_socket/shared_interfaces';
+import { Text } from '@/components/text';
+import { AppMaterialIcons } from '@/components/icons';
+import { AccessDropdown } from './AccessDropdown';
+import { IndividualSharePerson } from './IndividualSharePerson';
+import { ShareMenuContentEmbed } from './ShareMenuContentEmbed';
+import { ShareMenuContentPublish } from './ShareMenuContentPublish';
+import { ShareWithGroupAndTeam } from './ShareWithTeamAndGroup';
+import { ShareMenuTopBarOptions } from './ShareMenuTopBar';
+import { useUserConfigContextSelector } from '@/context/Users';
+import { useBusterDashboardContextSelector } from '@/context/Dashboards';
+import { useBusterCollectionIndividualContextSelector } from '@/context/Collections';
+import { useBusterMetricsIndividualContextSelector } from '@/context/Metrics';
+import { useStyles } from './useStyles';
+import { inputHasText } from '@/utils/text';
+
+export const ShareMenuContentBody: React.FC<{
+ selectedOptions: ShareMenuTopBarOptions;
+ setOpenShareWithGroupAndTeam: (open: boolean) => void;
+ goBack: () => void;
+ onCopyLink: () => void;
+ shareAssetConfig: BusterShare;
+ assetId: string;
+ assetType: ShareAssetType;
+}> = React.memo(
+ ({
+ onCopyLink,
+ shareAssetConfig,
+ selectedOptions,
+ assetId,
+ assetType,
+ goBack,
+ setOpenShareWithGroupAndTeam
+ }) => {
+ const Component = ContentRecord[selectedOptions];
+
+ const selectedClass = selectedOptions === ShareMenuTopBarOptions.Share ? 'pt-3' : '';
+ const individual_permissions = shareAssetConfig.individual_permissions;
+ const team_permissions = shareAssetConfig.team_permissions;
+ const organization_permissions = shareAssetConfig.organization_permissions;
+ const publicly_accessible = shareAssetConfig.publicly_accessible;
+ const publicExpirationDate = shareAssetConfig.public_expiry_date;
+ const password = shareAssetConfig.public_password;
+
+ return (
+ <div className={selectedClass}>
+ <Component
+ setOpenShareWithGroupAndTeam={setOpenShareWithGroupAndTeam}
+ goBack={goBack}
+ onCopyLink={onCopyLink}
+ individual_permissions={individual_permissions}
+ team_permissions={team_permissions}
+ organization_permissions={organization_permissions}
+ publicly_accessible={publicly_accessible}
+ publicExpirationDate={publicExpirationDate}
+ password={password}
+ assetId={assetId}
+ assetType={assetType}
+ />
+ </div>
+ );
+ }
+);
+ShareMenuContentBody.displayName = 'ShareMenuContentBody';
+
+const ShareMenuContentShare: React.FC<{
+ setOpenShareWithGroupAndTeam: (open: boolean) => void;
+ individual_permissions: BusterShare['individual_permissions'];
+ assetType: ShareAssetType;
+ assetId: string;
+}> = React.memo(({ setOpenShareWithGroupAndTeam, assetType, individual_permissions, assetId }) => {
+ const userTeams = useUserConfigContextSelector((state) => state.userTeams);
+ const onShareMetric = useBusterMetricsIndividualContextSelector((state) => state.onShareMetric);
+ const onShareDashboard = useBusterDashboardContextSelector((state) => state.onShareDashboard);
+ const onShareCollection = useBusterCollectionIndividualContextSelector(
+ (state) => state.onShareCollection
+ );
+ const [inputValue, setInputValue] = React.useState<string>('');
+ const [isInviting, setIsInviting] = React.useState<boolean>(false);
+ const [defaultPermissionLevel, setDefaultPermissionLevel] = React.useState<ShareRole>(
+ ShareRole.VIEWER
+ );
+ const disableSubmit = !inputHasText(inputValue) || !validate(inputValue);
+ const hasUserTeams = userTeams.length > 0;
+
+ const onSubmitNewEmail = useMemoizedFn(async () => {
+ const isValidEmail = validate(inputValue);
+ if (!isValidEmail) {
+ alert('Invalid email address');
+ return;
+ }
+
+ const payload = {
+ id: assetId,
+ user_permissions: [
+ {
+ user_email: inputValue,
+ role: defaultPermissionLevel
+ }
+ ]
+ };
+
+ setIsInviting(true);
+ if (assetType === ShareAssetType.METRIC) {
+ await onShareMetric(payload);
+ } else if (assetType === ShareAssetType.DASHBOARD) {
+ await onShareDashboard(payload);
+ } else if (assetType === ShareAssetType.COLLECTION) {
+ await onShareCollection(payload);
+ }
+ setIsInviting(false);
+ setInputValue('');
+ });
Greptile
greptile
logic: Missing error handling for failed share operations. Add a try/catch block to handle API errors and reset the isInviting state.
suggested fix
setIsInviting(true);
+ try {
if (assetType === ShareAssetType.METRIC) {
await onShareMetric(payload);
} else if (assetType === ShareAssetType.DASHBOARD) {
await onShareDashboard(payload);
} else if (assetType === ShareAssetType.COLLECTION) {
await onShareCollection(payload);
}
setInputValue('');
+ } catch (error) {
+ message.error('Failed to share. Please try again.');
+ } finally {
setIsInviting(false);
}
});