179 examples
Race condition
Program outcome depends unpredictably on event timing.
[ FAQ1 ]
What is a race condition?
A race condition occurs when two or more concurrent operations or threads access and modify shared data without proper synchronization. The final outcome then depends on the exact timing and order in which the threads execute, leading to inconsistent or unpredictable behavior. For instance, two threads updating a shared counter at the same time can each read the old value and write back a stale result, silently losing one of the increments. Such issues compromise thread safety, introduce subtle bugs, and can be hard to reproduce or debug because they only surface under particular timings.
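As a minimal sketch of that lost-update scenario (all names are illustrative, and the 10 ms delay just stands in for I/O latency), two concurrent tasks read the counter, pause, and write back stale values:

```typescript
// Two tasks read a shared counter, yield to the event loop, then write back a
// stale value, so one increment is lost. Illustrative only.
let counter = 0;

async function incrementWithSlowWork(): Promise<void> {
  const current = counter;                      // read shared state
  await new Promise((r) => setTimeout(r, 10));  // simulated I/O latency
  counter = current + 1;                        // write back a stale value
}

async function main(): Promise<void> {
  await Promise.all([incrementWithSlowWork(), incrementWithSlowWork()]);
  console.log(counter); // prints 1, not 2: the second write clobbered the first
}

main();
```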
[ FAQ2 ]
How to fix race conditions
To fix race conditions, add synchronization that controls concurrent access to shared resources. Use mutexes (locks) or critical sections so that only one thread at a time reads or modifies sensitive data. Adopt thread-safe programming practices that clearly define how threads interact with shared variables and resources. In asynchronous code, manage the timing and order of operations through appropriate use of promises, async/await, or event-driven design. Finally, test concurrency scenarios regularly and use debugging tools built for detecting race conditions to catch issues before they reach production.
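A minimal sketch of the mutual-exclusion fix, continuing the counter example above; the Mutex class here is an illustrative promise-chain lock, not a library API:

```typescript
// Serialize the read-modify-write so concurrent increments cannot interleave.
class Mutex {
  private tail: Promise<void> = Promise.resolve();

  // Queue fn behind all previously queued work and return its result.
  runExclusive<T>(fn: () => Promise<T>): Promise<T> {
    const result = this.tail.then(fn);
    this.tail = result.then(() => undefined, () => undefined); // survive rejections
    return result;
  }
}

let counter = 0;
const counterLock = new Mutex();

async function safeIncrement(): Promise<void> {
  await counterLock.runExclusive(async () => {
    const current = counter;
    await new Promise((r) => setTimeout(r, 10)); // same simulated latency
    counter = current + 1;
  });
}

async function main(): Promise<void> {
  await Promise.all([safeIncrement(), safeIncrement()]);
  console.log(counter); // prints 2: the critical sections run one at a time
}

main();
```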
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
greptile
logic: Cache initialization happens after awaiting listChangedFiles, potentially causing race conditions if multiple runs occur simultaneously
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs occur simultaneously due to shared changedFiles state. Consider passing state as parameters.
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple concurrent runs modify changedFiles - consider using a lock or making changedFiles function-scoped
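One common way to address the repeated comments on this diff, sketched under the assumption that listChangedFiles returns a promise of an iterable of paths: cache the in-flight promise rather than the resolved array, so the null check and the assignment can no longer interleave across an await.

```typescript
// Memoize the *promise* so concurrent callers share one lookup; the assignment
// happens synchronously, before any await. Illustrative sketch, not the
// script's actual fix.
let changedFilesPromise: Promise<string[]> | null = null;

function getChangedFiles(
  listChangedFiles: () => Promise<Iterable<string>>,
): Promise<string[]> {
  if (changedFilesPromise === null) {
    changedFilesPromise = listChangedFiles().then((files) => [...files]);
  }
  return changedFilesPromise;
}
```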
diff block
+import { useCommandMenu } from '@/command-menu/hooks/useCommandMenu';
+import { useFilteredObjectMetadataItems } from '@/object-metadata/hooks/useFilteredObjectMetadataItems';
+import { useRightDrawer } from '@/ui/layout/right-drawer/hooks/useRightDrawer';
+import { RightDrawerPages } from '@/ui/layout/right-drawer/types/RightDrawerPages';
+import {
+ WorkflowTriggerType,
+ WorkflowWithCurrentVersion,
+} from '@/workflow/types/Workflow';
+import { workflowSelectedNodeState } from '@/workflow/workflow-diagram/states/workflowSelectedNodeState';
+import { RightDrawerStepListContainer } from '@/workflow/workflow-steps/components/RightDrawerWorkflowSelectStepContainer';
+import { RightDrawerWorkflowSelectStepTitle } from '@/workflow/workflow-steps/components/RightDrawerWorkflowSelectStepTitle';
+import { DATABASE_TRIGGER_TYPES } from '@/workflow/workflow-trigger/constants/DatabaseTriggerTypes';
+import { OTHER_TRIGGER_TYPES } from '@/workflow/workflow-trigger/constants/OtherTriggerTypes';
+import { TRIGGER_STEP_ID } from '@/workflow/workflow-trigger/constants/TriggerStepId';
+import { useUpdateWorkflowVersionTrigger } from '@/workflow/workflow-trigger/hooks/useUpdateWorkflowVersionTrigger';
+import { getTriggerDefaultDefinition } from '@/workflow/workflow-trigger/utils/getTriggerDefaultDefinition';
+import { useIsFeatureEnabled } from '@/workspace/hooks/useIsFeatureEnabled';
+import { useSetRecoilState } from 'recoil';
+import { MenuItemCommand, useIcons } from 'twenty-ui';
+import { FeatureFlagKey } from '~/generated-metadata/graphql';
+
+export const CommandMenuWorkflowSelectTriggerTypeContent = ({
+ workflow,
+}: {
+ workflow: WorkflowWithCurrentVersion;
+}) => {
+ const { getIcon } = useIcons();
+ const { updateTrigger } = useUpdateWorkflowVersionTrigger({ workflow });
+
+ const { activeObjectMetadataItems } = useFilteredObjectMetadataItems();
+
+ const { openRightDrawer } = useRightDrawer();
+ const setWorkflowSelectedNode = useSetRecoilState(workflowSelectedNodeState);
+ const { openWorkflowEditStepInCommandMenu } = useCommandMenu();
+ const isCommandMenuV2Enabled = useIsFeatureEnabled(
+ FeatureFlagKey.IsCommandMenuV2Enabled,
+ );
+
+ const handleTriggerTypeClick = ({
+ type,
+ defaultLabel,
+ icon,
+ }: {
+ type: WorkflowTriggerType;
+ defaultLabel: string;
+ icon: string;
+ }) => {
+ return async () => {
+ await updateTrigger(
+ getTriggerDefaultDefinition({
+ defaultLabel,
+ type,
+ activeObjectMetadataItems,
+ }),
+ );
+
+ setWorkflowSelectedNode(TRIGGER_STEP_ID);
greptile
logic: Setting workflow node state before trigger update completes could cause race conditions
diff block
try:
with self.lock:
self.current_tasks += 1
- func(*args, **kwargs) # 在这里调用函数,传递*args和**kwargs
+ func(*args, **kwargs) # call the function here, passing *args and **kwargs.
greptile
logic: Potential race condition: current_tasks is incremented inside lock but func is called outside. If func raises an exception, current_tasks may not be decremented properly.
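The snippet above is Python, but the exception-safe shape the comment asks for is language-agnostic: increment the in-flight counter, run the task inside try/finally, and always decrement. A sketch with illustrative names:

```typescript
// The finally block guarantees the counter is decremented even when the task throws.
let currentTasks = 0;

async function runTracked<T>(task: () => Promise<T>): Promise<T> {
  currentTasks += 1;
  try {
    return await task();
  } finally {
    currentTasks -= 1; // runs on success and on failure alike
  }
}
```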
diff block
+import { useBusterWebSocket } from '@/context/BusterWebSocket';
+import { useMemoizedFn } from 'ahooks';
+import { type MutableRefObject } from 'react';
+import type { IBusterChat, IBusterChatMessage } from '../interfaces';
+
+export const useChatUpdate = ({
+ chatsRef,
+ chatsMessagesRef,
+ startTransition
+}: {
+ chatsRef: MutableRefObject<Record<string, IBusterChat>>;
+ chatsMessagesRef: MutableRefObject<Record<string, IBusterChatMessage>>;
+ startTransition: (fn: () => void) => void;
+}) => {
+ const busterSocket = useBusterWebSocket();
+
+ const onUpdateChat = useMemoizedFn(
+ async (newChatConfig: Partial<IBusterChat> & { id: string }, saveToServer: boolean = false) => {
+ chatsRef.current[newChatConfig.id] = {
+ ...chatsRef.current[newChatConfig.id],
+ ...newChatConfig
+ };
greptile
potential race condition if chatsRef.current[newChatConfig.id] is undefined when spreading
suggested fix
chatsRef.current[newChatConfig.id] = {
+ ...(chatsRef.current[newChatConfig.id] || {}),
...newChatConfig
};
diff block
+import { useBusterWebSocket } from '@/context/BusterWebSocket';
+import { useMemoizedFn } from 'ahooks';
+import { type MutableRefObject } from 'react';
+import type { IBusterChat, IBusterChatMessage } from '../interfaces';
+
+export const useChatUpdate = ({
+ chatsRef,
+ chatsMessagesRef,
+ startTransition
+}: {
+ chatsRef: MutableRefObject<Record<string, IBusterChat>>;
+ chatsMessagesRef: MutableRefObject<Record<string, IBusterChatMessage>>;
+ startTransition: (fn: () => void) => void;
+}) => {
+ const busterSocket = useBusterWebSocket();
+
+ const onUpdateChat = useMemoizedFn(
+ async (newChatConfig: Partial<IBusterChat> & { id: string }, saveToServer: boolean = false) => {
+ chatsRef.current[newChatConfig.id] = {
+ ...chatsRef.current[newChatConfig.id],
+ ...newChatConfig
+ };
greptile
logic: potential race condition if chatsRef.current[newChatConfig.id] doesn't exist yet - should check existence first
suggested fix
chatsRef.current[newChatConfig.id] = {
+ ...(chatsRef.current[newChatConfig.id] || {}),
...newChatConfig
};
diff block
+import { useBusterWebSocket } from '@/context/BusterWebSocket';
+import { useMemoizedFn } from 'ahooks';
+import { type MutableRefObject } from 'react';
+import type { IBusterChat, IBusterChatMessage } from '../interfaces';
+
+export const useChatUpdate = ({
+ chatsRef,
+ chatsMessagesRef,
+ startTransition
+}: {
+ chatsRef: MutableRefObject<Record<string, IBusterChat>>;
+ chatsMessagesRef: MutableRefObject<Record<string, IBusterChatMessage>>;
+ startTransition: (fn: () => void) => void;
+}) => {
+ const busterSocket = useBusterWebSocket();
+
+ const onUpdateChat = useMemoizedFn(
+ async (newChatConfig: Partial<IBusterChat> & { id: string }, saveToServer: boolean = false) => {
+ chatsRef.current[newChatConfig.id] = {
+ ...chatsRef.current[newChatConfig.id],
+ ...newChatConfig
+ };
+ startTransition(() => {
+ //just used to trigger UI update
+
+ if (saveToServer) {
+ const { title, is_favorited, id } = chatsRef.current[newChatConfig.id];
+ busterSocket.emit({
+ route: '/chats/update',
+ payload: {
+ id,
+ title,
+ is_favorited
+ }
+ });
+ }
+ });
+ }
+ );
+
+ const onUpdateChatMessage = useMemoizedFn(
+ async (newMessageConfig: Partial<IBusterChatMessage> & { id: string }) => {
+ chatsMessagesRef.current[newMessageConfig.id] = {
+ ...chatsMessagesRef.current[newMessageConfig.id],
+ ...newMessageConfig
+ };
greptile
logic: similar race condition risk as with chats - should verify message exists before update
suggested fix
chatsMessagesRef.current[newMessageConfig.id] = {
+ ...(chatsMessagesRef.current[newMessageConfig.id] || {}),
...newMessageConfig
};
diff block
+package common
+
+import (
+ "context"
+ "fmt"
+ "github.com/nats-io/nats.go"
+ "github.com/nats-io/nats.go/jetstream"
+ "log"
+ "sync"
+ "time"
+)
+
+const (
+ DefaultStream = "defaultStream"
+)
+
+var (
+ streamRegistry = make(map[string]*JetStreamManager)
+ registryLock sync.RWMutex
+ streamInstances = make(map[string]jetstream.Stream)
+ streamInstLock sync.RWMutex
+)
+
+// RegisterManager registers a JetStreamManager with the specified streamID.
+func RegisterManager(streamID string, mgr *JetStreamManager) {
+ registryLock.Lock()
+ defer registryLock.Unlock()
+ streamRegistry[streamID] = mgr
+}
+
+// GetManager retrieves the JetStreamManager for the given streamID.
+// Returns the manager and true if found; otherwise returns nil and false.
+func GetManager(streamID string) (*JetStreamManager, bool) {
+ registryLock.RLock()
+ defer registryLock.RUnlock()
+ mgr, ok := streamRegistry[streamID]
+ return mgr, ok
+}
+
+// RegisterStreamInstances initializes the JetStream contexts (if needed),
+// creates or updates streams based on the provided JetStream configurations,
+// and stores the stream instances in a global map for later usage.
+// Parameters:
+//
+// nc - pointer to the NATS connection
+// cfgs - list of JetStreamConfig configurations to register
+func RegisterStreamInstances(nc *nats.Conn, cfgs []*JetStreamConfig) {
+ // Register managers for each provided configuration if not already registered.
+ if len(cfgs) > 0 {
+ for _, cfg := range cfgs {
+ if _, ok := GetManager(cfg.Name); !ok {
+ mgr := NewJetStream(cfg)
+ RegisterManager(cfg.Name, mgr)
+ } else {
+ log.Printf("manager for stream %q already registered", cfg.Name)
+ }
+ }
+ }
+
+ // Iterate through all registered stream managers to initialize JetStream and create stream instances.
+ for streamName, streamMgr := range streamRegistry {
+ streamInstLock.RLock()
+ _, exists := streamInstances[streamName]
+ streamInstLock.RUnlock()
+ if exists {
greptile
logic: RLock is held for too short a duration. The exists check and the subsequent write operation should be atomic to prevent race conditions.
suggested fix
+ streamInstLock.Lock()
_, exists := streamInstances[streamName]
if exists {
+ streamInstLock.Unlock()
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
greptile
logic: Race condition possible if multiple runs occur simultaneously - changedFiles could be modified between null check and assignment
diff block
+package common
+
+import (
+ "context"
+ "fmt"
+ "github.com/nats-io/nats.go"
+ "github.com/nats-io/nats.go/jetstream"
+ "log"
+ "sync"
+ "time"
+)
+
+const (
+ DefaultStream = "defaultStream"
+)
+
+var (
+ streamRegistry = make(map[string]*JetStreamManager)
+ registryLock sync.RWMutex
+ streamInstances = make(map[string]jetstream.Stream)
+ streamInstLock sync.RWMutex
+)
+
+// RegisterManager registers a JetStreamManager with the specified streamID.
+func RegisterManager(streamID string, mgr *JetStreamManager) {
+ registryLock.Lock()
+ defer registryLock.Unlock()
+ streamRegistry[streamID] = mgr
+}
+
+// GetManager retrieves the JetStreamManager for the given streamID.
+// Returns the manager and true if found; otherwise returns nil and false.
+func GetManager(streamID string) (*JetStreamManager, bool) {
+ registryLock.RLock()
+ defer registryLock.RUnlock()
+ mgr, ok := streamRegistry[streamID]
+ return mgr, ok
+}
+
+// RegisterStreamInstances initializes the JetStream contexts (if needed),
+// creates or updates streams based on the provided JetStream configurations,
+// and stores the stream instances in a global map for later usage.
+// Parameters:
+//
+// nc - pointer to the NATS connection
+// cfgs - list of JetStreamConfig configurations to register
+func RegisterStreamInstances(nc *nats.Conn, cfgs []*JetStreamConfig) {
+ // Register managers for each provided configuration if not already registered.
+ if len(cfgs) > 0 {
+ for _, cfg := range cfgs {
+ if _, ok := GetManager(cfg.Name); !ok {
+ mgr := NewJetStream(cfg)
+ RegisterManager(cfg.Name, mgr)
+ } else {
+ log.Printf("manager for stream %q already registered", cfg.Name)
+ }
+ }
+ }
+
+ // Iterate through all registered stream managers to initialize JetStream and create stream instances.
+ for streamName, streamMgr := range streamRegistry {
+ streamInstLock.RLock()
+ _, exists := streamInstances[streamName]
+ streamInstLock.RUnlock()
+ if exists {
+ log.Printf("streamInstance %q already created", streamName)
+ continue
+ }
+ // Initialize JetStream context
+ if err := streamMgr.InitJetStream(nc); err != nil {
+ log.Printf("failed to initialize jetstream for stream %q: %v", streamName, err)
+ continue
+ }
+ ctx := context.Background()
+ stream, err := streamMgr.CreateStream(ctx)
+ if err != nil {
+ log.Printf("failed to create stream %q: %v", streamName, err)
+ continue
+ }
+ streamInstLock.Lock()
+ streamInstances[streamName] = stream
+ streamInstLock.Unlock()
+ log.Printf("streamInstance %q created", streamName)
+ }
greptile
style: Potential race condition when iterating over streamRegistry while other goroutines might modify it. Consider making a copy of the registry keys before iteration.
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
greptile
logic: listChangedFiles() is now awaited but changedFiles is spread synchronously. This could cause a race condition if listChangedFiles returns a Promise-like iterable.
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
greptile
style: Cache initialization could be moved before the await to prevent potential race conditions with concurrent runs
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs occur simultaneously - changedFiles could be modified between null check and assignment
diff block
+//
+// TextViewWrapper.swift
+// Onit
+//
+// Created by Kévin Naudin on 16/02/2025.
+//
+
+import Defaults
+import SwiftUI
+
+/// A custom TextView which :
+/// - Works like a TextField - press enter will call `onSubmit`
+/// - Has a dynamic height that is limited by `maxHeight`, when max height is reached the content become scrollable
+/// - Manage a placeholder
+struct TextViewWrapper: NSViewRepresentable {
+ @Binding var text: String
+ @Binding var dynamicHeight: CGFloat
+ var onSubmit: (() -> Void)? = nil
+ var maxHeight: CGFloat? = nil
+ var placeholder: String? = nil
+
+ var font: NSFont = AppFont.medium16.nsFont
+ var textColor: NSColor = .white
+ var placeholderColor: NSColor = .gray300
+
+ func makeNSView(context: Self.Context) -> NSScrollView {
+ let scrollView = NSScrollView()
+ let textView = CustomTextView(text: text,
+ customFont: font,
+ textColor: textColor,
+ placeholderColor: placeholderColor,
+ placeholder: placeholder)
+
+ textView.delegate = context.coordinator
+ scrollView.hasVerticalScroller = false
+ scrollView.drawsBackground = false
+ scrollView.borderType = .noBorder
+ scrollView.verticalScrollElasticity = .none
+ scrollView.hasVerticalRuler = false
+ scrollView.autohidesScrollers = true
+ scrollView.documentView = textView
+ scrollView.contentView.postsBoundsChangedNotifications = true
+ context.coordinator.textView = textView
+
+ /// First time the view appear with huge text
+ /// We should update the height and the scroll inset
+ DispatchQueue.main.async {
+ context.coordinator.updateHeight()
+
+ let contentHeight = textView.frame.height
+ let visibleHeight = scrollView.contentView.bounds.height
+ let newY = max(0, contentHeight - visibleHeight)
+
+ scrollView.contentView.scroll(NSPoint(x: 0, y: newY))
+ scrollView.reflectScrolledClipView(scrollView.contentView)
+ }
greptile
logic: Race condition possible between async height update and scroll position calculation. Consider combining these operations or using a completion handler.
diff block
>
{variableAsHogQL}
</code>
+ <LemonSwitch
+ size="xsmall"
+ label="Null"
+ checked={isNull}
+ onChange={(value) => {
+ setIsNull(value)
+ onChange(variable.id, null, value)
+ }}
greptile
logic: setting isNull and value separately could lead to race conditions. Consider combining into a single state update
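A sketch of the single-state-update idea from the comment above: keep the value and its null flag in one piece of state so they cannot drift apart between renders. The names and types are illustrative, not the component's real API.

```typescript
import { useState } from 'react';

type VariableValue = { value: string | null; isNull: boolean };

// Both fields change in one setState call, so readers never observe a
// half-updated combination.
function useVariableValue(initial: string | null) {
  const [state, setState] = useState<VariableValue>({
    value: initial,
    isNull: initial === null,
  });

  const setNull = (isNull: boolean) =>
    setState((prev) => ({ value: isNull ? null : prev.value, isNull }));

  return { state, setNull };
}
```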
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs happen simultaneously. The null check isn't sufficient for async execution.
diff block
+import { getPreferenceValues, LocalStorage } from "@raycast/api";
+import fetch from "node-fetch";
+import type { RequestInit } from "node-fetch";
+
+interface Preferences {
+ limitlessApiKey?: string;
+}
+
+interface LifelogContent {
+ type: "heading1" | "heading2" | "blockquote";
+ content: string;
+ startTime: string;
+ endTime: string;
+ startOffsetMs: number;
+ endOffsetMs: number;
+ children: LifelogContent[];
+ speakerName: string;
+ speakerIdentifier: "user" | null;
+}
+
+interface Lifelog {
+ id: string;
+ title: string;
+ markdown: string;
+ contents: LifelogContent[];
+}
+
+interface LifelogResponse {
+ data: {
+ lifelogs: Lifelog[];
+ };
+ meta: {
+ lifelogs: {
+ nextCursor: string;
+ count: number;
+ };
+ };
+}
+
+interface GetLifelogsParams {
+ timezone?: string;
+ date?: string;
+ start?: string;
+ end?: string;
+ cursor?: string;
+ direction?: "asc" | "desc";
+ includeMarkdown?: boolean;
+ includeHeadings?: boolean;
+ limit?: number;
+}
+
+interface PendantData {
+ id: string;
+ name: string;
+ status: string;
+ lastSync: string;
+ batteryLevel?: number;
+}
+
+interface APIError {
+ message: string;
+ code?: string;
+}
+
+export class LimitlessAPI {
+ private static instance: LimitlessAPI;
+ private apiKey: string | undefined;
+ private baseUrl = "https://api.limitless.ai/v1";
+
+ private constructor() {
+ const preferences = getPreferenceValues<Preferences>();
+ this.apiKey = preferences.limitlessApiKey;
+ this.initializeApiKey();
+ }
+
+ private async initializeApiKey() {
+ try {
+ // Check if API key is stored in LocalStorage
+ const localStorageKey = await LocalStorage.getItem<string>("limitlessApiKey");
+ if (localStorageKey) {
+ this.apiKey = localStorageKey;
+ }
+ } catch (error) {
+ console.error("Failed to get API key from LocalStorage:", error);
+ }
greptile
logic: initializeApiKey() is called in constructor but doesn't await the result, which could lead to race conditions with API calls
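A sketch of one way to close the gap the comment points at: keep the initialization promise on the instance and await it at the start of every request, so no call can run with a half-loaded key. The class shape mirrors the diff only loosely; loadStoredKey is an assumed stand-in for the LocalStorage read.

```typescript
// Assumed stand-in for LocalStorage.getItem in the diff above.
async function loadStoredKey(): Promise<string | undefined> {
  return undefined;
}

class ApiClient {
  private apiKey: string | undefined;
  private readonly ready: Promise<void>;

  constructor(initialKey?: string) {
    this.apiKey = initialKey;
    this.ready = this.initializeApiKey(); // keep the promise instead of dropping it
  }

  private async initializeApiKey(): Promise<void> {
    try {
      const stored = await loadStoredKey();
      if (stored) this.apiKey = stored;
    } catch {
      // fall back to the key passed to the constructor
    }
  }

  async request(path: string): Promise<Response> {
    await this.ready; // every call waits for initialization to finish
    if (!this.apiKey) throw new Error('API key is missing');
    return fetch(path, { headers: { Authorization: `Bearer ${this.apiKey}` } });
  }
}
```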
diff block
useEffect(() => {
const dataList = data || [];
- setFilterList(dataList.filter((item) => item.content.includes(searchText)) || []);
+ setFilterList(
+ dataList
+ .filter((item) => item.content.includes(searchText))
+ .map((item) => {
+ item.markdown = item.content;
+ if (item.resources.length > 0) {
+ getItemMarkdown(item);
+ }
+ return item;
+ }) || [],
+ );
}, [searchText]);
useEffect(() => {
const dataList = data || [];
- setFilterList(dataList);
+ setFilterList(
+ dataList.map((item) => {
+ item.markdown = item.content;
+
+ if (item.resources.length > 0) {
+ getItemMarkdown(item);
+ }
+
+ return item;
+ }),
+ );
}, [data]);
function getItemUrl(item: MemoInfoResponse) {
- const url = getRequestUrl(`/m/${item.uid}`);
+ const url = getRequestUrl(`/${item.name}`);
return url;
}
- function getItemMarkdown(item: MemoInfoResponse) {
+ async function getItemMarkdown(item: MemoInfoResponse) {
const { content, resources } = item;
let markdown = content;
- resources.forEach((resource, index) => {
- const resourceUrl = getRequestUrl(`/file/${resource.name}/${resource.filename}`);
+ for (const resource of resources) {
+ const resourceBlobUrl = await getResourceBinToBase64(resource.name, resource.filename);
- if (index === 0) {
- markdown += "\n\n";
- }
+ markdown += `\n\n`;
+ }
- markdown += ` `;
+ setFilterList((prevList) => {
+ const updatedList = prevList.map((prevItem) => {
+ if (prevItem.name === item.name) {
+ return { ...prevItem, markdown };
+ }
+ return prevItem;
+ });
+ return updatedList;
});
-
- return markdown;
}
greptile
logic: Consider using Promise.all to process resources in getItemMarkdown to avoid potential race conditions during async updates.
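A sketch of the Promise.all approach the comment suggests: resolve every resource in parallel and append the results in their original order in a single pass. getResourceBinToBase64 mirrors the helper in the diff; the Resource type and the image markdown syntax are assumptions.

```typescript
type Resource = { name: string; filename: string };

async function buildMarkdown(
  content: string,
  resources: Resource[],
  getResourceBinToBase64: (name: string, filename: string) => Promise<string>,
): Promise<string> {
  // Promise.all preserves input order, so the rendered markdown is deterministic.
  const blobUrls = await Promise.all(
    resources.map((r) => getResourceBinToBase64(r.name, r.filename)),
  );
  return blobUrls.reduce((markdown, url) => `${markdown}\n\n![](${url})`, content);
}
```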
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
greptile
logic: listChangedFiles() is now awaited but changedFiles assignment isn't atomic - could cause race conditions if multiple runs happen simultaneously
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
greptile
logic: Cache initialization happens after awaiting listChangedFiles, which could lead to race conditions if multiple runs occur simultaneously
diff block
+import { IconExternal } from '@posthog/icons'
+import { LemonButton } from '@posthog/lemon-ui'
+import { useActions } from 'kea'
+import { Max } from 'scenes/max/Max'
+import { urls } from 'scenes/urls'
+
+import { SidePanelPaneHeader } from '../components/SidePanelPaneHeader'
+import { sidePanelStateLogic } from '../sidePanelStateLogic'
+
+export function SidePanelMax(): JSX.Element {
+ const { closeSidePanel } = useActions(sidePanelStateLogic)
+
+ return (
+ <>
+ <SidePanelPaneHeader>
+ <div className="flex-1" />
+ <LemonButton
+ size="small"
+ sideIcon={<IconExternal />}
+ to={urls.max()}
+ onClick={() => closeSidePanel()}
+ tooltip="Open as main focus"
greptile
logic: potential race condition between `to` navigation and `onClick` handler - the panel might not close properly if navigation happens first
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
greptile
logic: These global variables could cause race conditions in concurrent ESLint runs. Consider moving them into a class or function scope.
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs execute simultaneously. The changedFiles null check isn't thread-safe.
diff block
await expect(workflowVisualizer.getAllStepNodes()).toHaveCount(1);
});
-test.fixme(
- 'Use an old version as draft while having a pending draft version',
- async ({ workflowVisualizer, page }) => {
- await workflowVisualizer.createInitialTrigger('record-created');
-
- await workflowVisualizer.createStep('create-record');
-
- await workflowVisualizer.background.click();
-
- await Promise.all([
- expect(workflowVisualizer.workflowStatus).toHaveText('Active'),
-
- workflowVisualizer.activateWorkflowButton.click(),
- ]);
-
- await Promise.all([
- expect(workflowVisualizer.workflowStatus).toHaveText('Draft'),
-
- workflowVisualizer.createStep('delete-record'),
- ]);
-
- await expect(workflowVisualizer.triggerNode).toContainText(
- 'Record is Created',
- );
- await expect(workflowVisualizer.getAllStepNodes()).toContainText([
- 'Create Record',
- 'Delete Record',
- ]);
- await expect(workflowVisualizer.getAllStepNodes()).toHaveCount(2);
- await expect(workflowVisualizer.useAsDraftButton).not.toBeVisible();
-
- const workflowsLink = page.getByRole('link', { name: 'Workflows' });
- await workflowsLink.click();
-
- const recordTableRowForWorkflow = page.getByRole('row', {
- name: workflowVisualizer.workflowName,
- });
-
- const linkToWorkflow = recordTableRowForWorkflow.getByRole('link', {
- name: workflowVisualizer.workflowName,
- });
- expect(linkToWorkflow).toBeVisible();
-
- const linkToFirstWorkflowVersion = recordTableRowForWorkflow.getByRole(
- 'link',
- {
- name: 'v1',
- },
- );
-
- await linkToFirstWorkflowVersion.click();
-
- await expect(workflowVisualizer.workflowStatus).toHaveText('Active');
- await expect(workflowVisualizer.useAsDraftButton).toBeVisible();
- await expect(workflowVisualizer.triggerNode).toContainText(
- 'Record is Created',
- );
- await expect(workflowVisualizer.getAllStepNodes()).toContainText([
- 'Create Record',
- ]);
- await expect(workflowVisualizer.getAllStepNodes()).toHaveCount(1);
-
- await Promise.all([
- expect(workflowVisualizer.overrideDraftButton).toBeVisible(),
-
- workflowVisualizer.useAsDraftButton.click(),
- ]);
-
- await Promise.all([
- page.waitForURL(`/object/workflow/${workflowVisualizer.workflowId}`),
-
- workflowVisualizer.overrideDraftButton.click(),
- ]);
-
- await expect(workflowVisualizer.workflowStatus).toHaveText('Draft');
- await expect(workflowVisualizer.useAsDraftButton).not.toBeVisible();
- await expect(workflowVisualizer.triggerNode).toContainText(
- 'Record is Created',
- );
- await expect(workflowVisualizer.getAllStepNodes()).toContainText([
- 'Create Record',
- ]);
- await expect(workflowVisualizer.getAllStepNodes()).toHaveCount(1);
- await expect(workflowVisualizer.activateWorkflowButton).toBeVisible();
- await expect(workflowVisualizer.discardDraftButton).toBeVisible();
- },
-);
+test('Use an old version as draft while having a pending draft version', async ({
+ workflowVisualizer,
+ page,
+}) => {
+ await workflowVisualizer.createInitialTrigger('record-created');
+
+ await workflowVisualizer.createStep('create-record');
+
+ await workflowVisualizer.background.click();
+
+ await Promise.all([
+ expect(workflowVisualizer.workflowStatus).toHaveText('Active'),
+
+ workflowVisualizer.activateWorkflowButton.click(),
+ ]);
+
+ await Promise.all([
+ expect(workflowVisualizer.workflowStatus).toHaveText('Draft'),
+
+ workflowVisualizer.createStep('delete-record'),
+ ]);
+
+ await expect(workflowVisualizer.triggerNode).toContainText(
+ 'Record is Created',
+ );
+ await expect(workflowVisualizer.getAllStepNodes()).toContainText([
+ 'Create Record',
+ 'Delete Record',
+ ]);
+ await expect(workflowVisualizer.getAllStepNodes()).toHaveCount(2);
+ await expect(workflowVisualizer.useAsDraftButton).not.toBeVisible();
+
+ await workflowVisualizer.closeSidePanel();
+
+ const workflowsLink = page.getByRole('link', { name: 'Workflows' });
+ await workflowsLink.click();
+
+ const recordTableRowForWorkflow = page.getByRole('row', {
+ name: workflowVisualizer.workflowName,
+ });
+
+ const linkToWorkflow = recordTableRowForWorkflow.getByRole('link', {
+ name: workflowVisualizer.workflowName,
+ });
+ expect(linkToWorkflow).toBeVisible();
+
+ const linkToFirstWorkflowVersion = recordTableRowForWorkflow.getByRole(
+ 'link',
+ {
+ name: 'v1',
+ },
+ );
+
+ await linkToFirstWorkflowVersion.click();
+
+ await expect(workflowVisualizer.workflowStatus).toHaveText('Active');
+ await expect(workflowVisualizer.useAsDraftButton).toBeVisible();
+ await expect(workflowVisualizer.triggerNode).toContainText(
+ 'Record is Created',
+ );
+ await expect(workflowVisualizer.getAllStepNodes()).toContainText([
+ 'Create Record',
+ ]);
+ await expect(workflowVisualizer.getAllStepNodes()).toHaveCount(1);
+
+ await Promise.all([
+ expect(workflowVisualizer.overrideDraftButton).toBeVisible(),
+
+ workflowVisualizer.useAsDraftButton.click(),
+ ]);
greptile
logic: check for overrideDraftButton visibility and useAsDraftButton click should be sequential, not parallel, to avoid race conditions
diff block
+/* eslint-disable @typescript-eslint/no-explicit-any */
+
+import { clearSearchBar, getPreferenceValues, showToast, Toast } from "@raycast/api";
+import { useCallback, useMemo, useRef, useState } from "react";
+import say from "say";
+import { v4 as uuidv4 } from "uuid";
+import { Chat, ChatHook, Model } from "../type";
+import { useAutoTTS } from "./useAutoTTS";
+import { useHistory } from "./useHistory";
+import { DEFAULT_MODEL } from "./useModel";
+
+// Debug logging utility
+function debugLog<T>(message: string, data?: T) {
+ console.log(`[DEBUG] ${message}`, data ? JSON.stringify(data, null, 2) : "");
+}
+
+async function callGrokAPI(
+ params: { model: string; messages: string[]; stream: boolean },
+ options: { signal: AbortSignal },
+) {
+ const apiKey = getPreferenceValues<{ apiKey: string }>().apiKey;
+ if (!apiKey) {
+ debugLog("API key missing");
+ throw new Error("Grok API key is missing in preferences");
+ }
+
+ const endpoint = "https://api.x.ai/v1/chat/completions";
+ const headers = {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${apiKey}`,
+ };
+ const body = JSON.stringify(params);
+
+ debugLog("API Request", { endpoint, headers: { ...headers, Authorization: "[REDACTED]" }, body });
+
+ try {
+ const response = await fetch(endpoint, {
+ method: "POST",
+ headers,
+ body,
+ signal: options.signal,
+ });
+
+ debugLog("API Response Status", { status: response.status, statusText: response.statusText });
+
+ if (!response.ok) {
+ const errorText = await response.text();
+ debugLog("API Error Response", { errorText });
+ throw new Error(`HTTP ${response.status}: ${errorText || response.statusText}`);
+ }
+
+ if (!params.stream) {
+ const json = await response.json();
+ debugLog("API Non-Streaming Response", json);
+ return json;
+ }
+
+ debugLog("Starting streaming response");
+ return {
+ async *[Symbol.asyncIterator]() {
+ const reader = response.body?.getReader();
+ if (!reader) {
+ debugLog("No stream reader available");
+ return;
+ }
+ let buffer = "";
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) {
+ debugLog("Stream ended");
+ break;
+ }
+ buffer += new TextDecoder().decode(value);
+ const lines = buffer.split("\n");
+ buffer = lines.pop() || "";
+ for (const line of lines) {
+ if (line.trim()) {
+ try {
+ // Strip 'data: ' prefix for SSE
+ if (line.startsWith("data: ")) {
+ const jsonStr = line.slice(6); // Remove 'data: '
+ if (jsonStr === "[DONE]") {
+ debugLog("Stream termination signal received");
+ return;
+ }
+ const parsed = JSON.parse(jsonStr);
+ debugLog("Stream Chunk", parsed);
+ yield parsed;
+ } else {
+ debugLog("Skipping non-data line", { line });
+ }
+ } catch (e) {
+ debugLog("Failed to parse stream chunk", { line, error: String(e) });
+ }
+ }
+ }
+ }
+ },
+ };
+ } catch (error) {
+ debugLog("API Call Failed", { error: String(error), stack: error instanceof Error ? error.stack : undefined });
+ throw error;
+ }
+}
+
+export function useChat<T extends Chat>(props: T[]): ChatHook {
+ const [data, setData] = useState<Chat[]>(props);
+ const [errorMsg, setErrorMsg] = useState<string | null>(null);
+ const [selectedChatId, setSelectedChatId] = useState<string | null>(null);
+ const [isLoading, setLoading] = useState<boolean>(false);
+ const [isAborted, setIsAborted] = useState<boolean>(false);
+ const [useStream] = useState<boolean>(() => {
+ const streamPref = getPreferenceValues<{ useStream: boolean }>().useStream;
+ debugLog("Stream Preference", { useStream: streamPref });
+ return streamPref;
+ });
+ const [streamData, setStreamData] = useState<Chat | undefined>();
+ const abortControllerRef = useRef<AbortController | null>(null);
+
+ const [isHistoryPaused] = useState<boolean>(() => {
+ const paused = getPreferenceValues<{ isHistoryPaused: boolean }>().isHistoryPaused;
+ debugLog("History Paused Preference", { isHistoryPaused: paused });
+ return paused;
+ });
+
+ const history = useHistory();
+ const isAutoTTS = useAutoTTS();
+
+ async function ask(question: string, model: Model) {
+ debugLog("Ask Called", { question, modelId: model.id, modelOption: model.option });
+
+ clearSearchBar();
+ setLoading(true);
+ await showToast({
+ title: "Getting Grok's answer...",
+ style: Toast.Style.Animated,
+ });
+
+ let chat: Chat = {
+ id: uuidv4(),
+ question,
+ answer: "",
+ created_at: new Date().toISOString(),
+ files: [],
+ };
+
+ setData((prev) => {
+ debugLog("Updating chat data", { newChatId: chat.id });
+ return [...prev, chat];
+ });
+
+ setTimeout(() => {
+ setSelectedChatId(chat.id);
+ debugLog("Selected chat ID", { selectedChatId: chat.id });
+ }, 50);
greptile
style: Using setTimeout for state updates can lead to race conditions. Consider using React's useEffect with the appropriate dependencies instead.
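A sketch of the useEffect alternative the comment suggests: select the new chat in response to the data change itself instead of guessing with a 50 ms timer. The state names loosely mirror the diff; the hook is illustrative.

```typescript
import { useEffect, useState } from 'react';

type Chat = { id: string };

function useSelectNewestChat(data: Chat[]): string | null {
  const [selectedChatId, setSelectedChatId] = useState<string | null>(null);

  useEffect(() => {
    // Runs after the updated chat list has been committed, so there is no
    // race against the state update that added the chat.
    const newest = data[data.length - 1];
    if (newest) setSelectedChatId(newest.id);
  }, [data]);

  return selectedChatId;
}
```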
diff block
if not start <= last_modified <= end:
continue
+ # Skip image files
+ file_name = os.path.basename(obj["Key"])
+ file_ext = get_file_ext(file_name)
+ if is_accepted_file_ext(file_ext, OnyxExtensionType.Multimedia):
+ logger.debug(f"Skipping image file: {obj['Key']}")
+ continue
greptile
logic: Potential race condition - file_name is extracted twice (here and line 210). Use single variable to avoid inconsistency if file changes between calls.
suggested fix
file_name = os.path.basename(obj["Key"])
file_ext = get_file_ext(file_name)
if is_accepted_file_ext(file_ext, OnyxExtensionType.Multimedia):
logger.debug(f"Skipping image file: {obj['Key']}")
continue
+ downloaded_file = self._download_object(obj["Key"])
+ link = self._get_blob_link(obj["Key"])
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple concurrent runs modify changedFiles - consider using a lock or making changedFiles scoped to each run
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple processes call this simultaneously while changedFiles is null
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs execute simultaneously. The null check and assignment aren't atomic.
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs occur simultaneously due to shared changedFiles state.
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
greptile
logic: Race condition possible if multiple calls occur before changedFiles is populated
diff block
export const SettingsAdminGeneral = () => {
const [userIdentifier, setUserIdentifier] = useState('');
const [userId, setUserId] = useState('');
-
- const { error: impersonateError } = useImpersonate();
+ const { enqueueSnackBar } = useSnackBar();
const { activeTabId, setActiveTabId } = useTabList(
SETTINGS_ADMIN_USER_LOOKUP_WORKSPACE_TABS_ID,
);
- const userLookupResult = useRecoilValue(userLookupResultState);
- const adminPanelError = useRecoilValue(adminPanelErrorState);
+ const setUserLookupResult = useSetRecoilState(userLookupResultState);
+ const [isUserLookupLoading, setIsUserLookupLoading] = useState(false);
- const { handleUserLookup, isLoading } = useUserLookup();
+ const [userLookup] = useUserLookupAdminPanelMutation();
+ const userLookupResult = useRecoilValue(userLookupResultState);
const canManageFeatureFlags = useRecoilValue(canManageFeatureFlagsState);
const handleSearch = async () => {
setActiveTabId('');
-
- const result = await handleUserLookup(userIdentifier);
-
- if (isDefined(result?.user?.id) && !adminPanelError) {
+ setIsUserLookupLoading(true);
+ setUserLookupResult(null);
+
+ const response = await userLookup({
+ variables: { userIdentifier },
+ onCompleted: (data) => {
+ setIsUserLookupLoading(false);
+ if (isDefined(data?.userLookupAdminPanel)) {
+ setUserLookupResult(data.userLookupAdminPanel);
+ }
+ },
+ onError: (error) => {
+ setIsUserLookupLoading(false);
+ enqueueSnackBar(error.message, {
+ variant: SnackBarVariant.Error,
+ });
+ },
+ });
greptile
logic: The response from userLookup is used before the onCompleted callback is called, which could lead to race conditions. Consider moving the result handling logic into the onCompleted callback.
suggested fix
+ await userLookup({
variables: { userIdentifier },
onCompleted: (data) => {
setIsUserLookupLoading(false);
if (isDefined(data?.userLookupAdminPanel)) {
setUserLookupResult(data.userLookupAdminPanel);
+ if (isDefined(data.userLookupAdminPanel?.user?.id)) {
+ setUserId(data.userLookupAdminPanel.user.id.trim());
}
+ if (isDefined(data.userLookupAdminPanel?.workspaces) && data.userLookupAdminPanel.workspaces.length > 0) {
+ setActiveTabId(data.userLookupAdminPanel.workspaces[0].id);
}
}
},
onError: (error) => {
setIsUserLookupLoading(false);
enqueueSnackBar(error.message, {
variant: SnackBarVariant.Error,
});
},
});
diff block
+import { adminPanelErrorState } from '@/settings/admin-panel/states/adminPanelErrorState';
+import { userLookupResultState } from '@/settings/admin-panel/states/userLookupResultState';
+import { useState } from 'react';
+import { useSetRecoilState } from 'recoil';
+import { isDefined } from 'twenty-shared';
+import { useUserLookupAdminPanelMutation } from '~/generated/graphql';
+
+export const useUserLookup = () => {
+ const setUserLookupResult = useSetRecoilState(userLookupResultState);
+ const setError = useSetRecoilState(adminPanelErrorState);
+ const [isLoading, setIsLoading] = useState(false);
+
+ const [userLookup] = useUserLookupAdminPanelMutation({
+ onCompleted: (data) => {
+ setIsLoading(false);
+ if (isDefined(data?.userLookupAdminPanel)) {
+ setUserLookupResult(data.userLookupAdminPanel);
+ }
+ },
+ onError: (error) => {
+ setIsLoading(false);
+ setError(error.message);
+ },
+ });
+
+ const handleUserLookup = async (userIdentifier: string) => {
+ setError(null);
+ setIsLoading(true);
+ setUserLookupResult(null);
+
+ const response = await userLookup({
+ variables: { userIdentifier },
+ });
+
greptile
logic: response data is returned before loading state is cleared, which could lead to race conditions
diff block
// console.log("Handling invoice.paid:", invoice.id);
- const batchUpdate = [];
- for (const cusProduct of activeCusProducts) {
- // Create invoice
+ InvoiceService.createInvoiceFromStripe({
+ sb,
+ stripeInvoice: invoice,
+ internalCustomerId: activeCusProducts[0].internal_customer_id,
+ productIds: activeCusProducts.map((p) => p.product_id),
+ internalProductIds: activeCusProducts.map((p) => p.internal_product_id),
+ org: org,
+ });
greptile
logic: Missing await on async operation - could cause race conditions or unhandled promise rejections
suggested fix
+ await InvoiceService.createInvoiceFromStripe({
sb,
stripeInvoice: invoice,
internalCustomerId: activeCusProducts[0].internal_customer_id,
productIds: activeCusProducts.map((p) => p.product_id),
internalProductIds: activeCusProducts.map((p) => p.internal_product_id),
org: org,
});
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple calls happen before changedFiles is populated. Consider adding a lock mechanism.
diff block
setPendingTransactionsNumber: (pendingTransactions: number) => void;
readyToClaimTransactionsNumber: number;
setReadyToClaimTransactionsNumber: (readyToClaimTransactions: number) => void;
+ transactionStatus: TransactionStatus,
+ setTransactionStatus: (status: TransactionStatus) => void;
greptile
logic: Global transaction status state could lead to race conditions when handling multiple transactions. Consider tracking status per transaction instead.
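A sketch of the per-transaction tracking the comment recommends: key statuses by transaction id instead of sharing one global value. TransactionStatus and the update helper are illustrative stand-ins for the real types.

```typescript
type TransactionStatus = 'pending' | 'readyToClaim' | 'claimed' | 'failed';

type TransactionStatusMap = Record<string, TransactionStatus>;

// Return a new map so state stores that compare by reference notice the change.
function withTransactionStatus(
  statuses: TransactionStatusMap,
  txId: string,
  status: TransactionStatus,
): TransactionStatusMap {
  return { ...statuses, [txId]: status };
}
```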
diff block
+import { promises as fs } from 'fs';
+import { v4 } from 'uuid';
+
+const MAIN_PATH = '/tmp/main.mjs';
+
+export const handler = async (event) => {
+ const { code, params } = event;
+
+ await fs.writeFile(MAIN_PATH, code, 'utf8');
+
+ const mainFile = await import(MAIN_PATH + `?t=${v4()}`);
greptile
logic: potential race condition if multiple functions write to the same path simultaneously - need unique paths per execution
```suggestion
+ const uniquePath = `/tmp/${v4()}.mjs`;
+ await fs.writeFile(uniquePath, code, 'utf8');
+ const mainFile = await import(uniquePath + `?t=${v4()}`);
```
diff block
function ServerList({
location,
visitLocation,
+ setTopServer,
}: {
location: Location;
visitLocation: (item: Location) => Promise<void>;
+ setTopServer: (server: Server) => void;
}) {
- const { data: sortedServers, visitItem: visitServer } = useFrecencySorting(location.servers);
+ const {
+ data: sortedServers,
+ visitItem: visitServer,
+ resetRanking,
+ } = useFrecencySorting(location.servers, { namespace: `servers-${location.id}` });
+
+ const rankingCacheRef = useRef(new RankingCache<Set<string>>(cache, new Set<string>()));
- async function setServer(server: { id: string }) {
- visitLocation(location);
- // If we call visitServer directly afterwards, it won't update both frequencies
- setTimeout(() => visitServer(server), 10);
+ async function setServer(server: Server) {
+ await visitLocation(location);
+ await visitServer(server);
+ rankingCacheRef.current.update(`ranked-servers-${location.id}`, (value) => {
+ if (value === undefined) return new Set<string>([server.id]);
+ value.add(server.id);
+ return value;
+ });
+
+ setTopServer(sortedServers[0]);
execSync(`mullvad relay set location ${server.id}`);
await showHUD("Location changed", { clearRootSearch: true, popToRootType: PopToRootType.Immediate });
}
+ async function resetServerRanking(server: Server) {
+ try {
+ await resetRanking(server);
+ if (rankingCacheRef.current.get(`ranked-servers-${location.id}`).size > 1) {
+ setTopServer(sortedServers[0]);
+ } else {
+ setTopServer({ id: "" });
+ }
greptile
logic: Race condition possible here. Should check if sortedServers exists and has length before accessing index 0
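A small, hedged guard for that access, reusing the empty-id fallback the snippet already uses elsewhere:
```ts
// Sketch: never dereference index 0 of a possibly empty or stale list.
const top = sortedServers?.[0];
if (top) {
  setTopServer(top);
} else {
  setTopServer({ id: "" }); // same fallback the snippet uses when no ranking remains
}
```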
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
greptile
logic: Cache initialization happens after awaiting listChangedFiles, which could lead to race conditions if multiple runs occur simultaneously
diff block
+import {
+ IconDatabase,
+ IconFeatures,
+ IconGraph,
+ IconMessage,
+ IconRewindPlay,
+ IconTestTube,
+ IconToggle,
+} from '@posthog/icons'
+import { actions, afterMount, connect, kea, listeners, path, reducers, selectors } from 'kea'
+import { loaders } from 'kea-loaders'
+import { router } from 'kea-router'
+import api from 'lib/api'
+import { reverseProxyCheckerLogic } from 'lib/components/ReverseProxyChecker/reverseProxyCheckerLogic'
+import { permanentlyMount } from 'lib/utils/kea-logic-builders'
+import { ProductIntentContext } from 'lib/utils/product-intents'
+import posthog from 'posthog-js'
+import { availableOnboardingProducts } from 'scenes/onboarding/utils'
+import { membersLogic } from 'scenes/organization/membersLogic'
+import { inviteLogic } from 'scenes/settings/organization/inviteLogic'
+import { teamLogic } from 'scenes/teamLogic'
+import { urls } from 'scenes/urls'
+
+import { sidePanelStateLogic } from '~/layout/navigation-3000/sidepanel/sidePanelStateLogic'
+import {
+ ActivationTaskStatus,
+ EventDefinitionType,
+ PipelineStage,
+ ProductKey,
+ ReplayTabs,
+ TeamBasicType,
+ type TeamPublicType,
+ type TeamType,
+} from '~/types'
+
+import { sidePanelSettingsLogic } from '../sidePanelSettingsLogic'
+import type { activationLogicType } from './activationLogicType'
+
+export type ActivationTaskDefinition = {
+ id: ActivationTask
+ section: ActivationSection
+ title: string
+ canSkip: boolean
+ dependsOn?: {
+ task: ActivationTask
+ reason: string
+ }[]
+ url?: string
+}
+
+export type ActivationTaskType = Omit<ActivationTaskDefinition, 'dependsOn'> & {
+ completed: boolean
+ skipped: boolean
+ lockedReason?: string
+}
+
+// make sure to change this prefix in case the schema of cached values is changed
+// otherwise the code will try to run with cached deprecated values
+const CACHE_PREFIX = 'v1'
+
+export const activationLogic = kea<activationLogicType>([
+ path(['lib', 'components', 'ActivationSidebar', 'activationLogic']),
+ connect(() => ({
+ values: [
+ teamLogic,
+ ['currentTeam'],
+ membersLogic,
+ ['memberCount'],
+ sidePanelStateLogic,
+ ['modalMode'],
+ reverseProxyCheckerLogic,
+ ['hasReverseProxy'],
+ ],
+ actions: [
+ teamLogic,
+ ['loadCurrentTeam', 'updateCurrentTeam'],
+ inviteLogic,
+ ['showInviteModal'],
+ sidePanelSettingsLogic,
+ ['openSettingsPanel'],
+ sidePanelStateLogic,
+ ['closeSidePanel'],
+ teamLogic,
+ ['addProductIntent'],
+ ],
+ })),
+ actions({
+ runTask: (id: ActivationTask) => ({ id }),
+ markTaskAsCompleted: (id: ActivationTask) => ({ id }),
+ markTaskAsSkipped: (id: ActivationTask) => ({ id }),
+ toggleShowHiddenSections: () => ({}),
+ addIntentForSection: (section: ActivationSection) => ({ section }),
+ toggleSectionOpen: (section: ActivationSection) => ({ section }),
+ setOpenSections: (teamId: TeamBasicType['id'], sections: ActivationSection[]) => ({ teamId, sections }),
+ onTeamLoad: true,
+ }),
+ reducers(() => ({
+ openSections: [
+ {} as Record<string, ActivationSection[]>,
+ { persist: true, prefix: CACHE_PREFIX },
+ {
+ setOpenSections: (state, { teamId, sections }) => {
+ return {
+ ...state,
+ [teamId]: sections,
+ }
+ },
+ },
+ ],
+ showHiddenSections: [
+ false,
+ {
+ toggleShowHiddenSections: (state) => !state,
+ },
+ ],
+ })),
+ loaders(({ cache }) => ({
+ customEventsCount: [
+ 0,
+ {
+ loadCustomEvents: async (_, breakpoint) => {
+ await breakpoint(200)
+ const url = api.eventDefinitions.determineListEndpoint({
greptile
logic: potential race condition between breakpoint(200) and api call - consider moving breakpoint after the cache check
diff block
router.actions.push(urls.onboarding(nextProductKey))
}
}
+
+ const mountedActivationLogic = activationLogic.findMounted()
+
+ if (mountedActivationLogic && mountedActivationLogic.values.isReady && values.isFirstProductOnboarding) {
+ mountedActivationLogic.actions.openSidePanel(SidePanelTab.Activation)
+ }
greptile
style: Race condition possible if activationLogic becomes ready after this check but before the panel needs to open. Consider using a listener or effect instead.
diff block
}),
actionToUrl(({ values }) => ({
setTabKey: ({ tabKey }) => {
- const tab = values.tabs.find((x) => x.key === tabKey)
- if (!tab) {
+ let tabUrl = values.tabs.find((x) => x.key === tabKey)?.url
+ if (!tabUrl && values.groupTypesLoading) {
+ const groupMatch = tabKey.match(/^groups-(\d+)$/)
+ if (groupMatch) {
+ tabUrl = urls.groups(parseInt(groupMatch[1]))
+ }
+ }
+ if (!tabUrl) {
return values.tabs[0].url
}
greptile
logic: potential race condition if tabs[0] is undefined during initial load
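A hedged guard for the fallback, assuming kea's actionToUrl treats an undefined return as "leave the URL unchanged":
```ts
// Sketch: avoid assuming tabs[0] exists while tabs are still loading.
if (!tabUrl) {
  return values.tabs[0]?.url; // undefined here means no URL change
}
return tabUrl;
```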
diff block
.map_err(WorkflowError::SerializeSignalBody)
.map_err(GlobalError::raw)?;
- match (self.to_workflow_id, self.tags.is_empty()) {
- (Some(workflow_id), true) => {
- tracing::debug!(signal_name=%T::NAME, to_workflow_id=%workflow_id, %signal_id, "dispatching signal");
+ match (
+ self.to_workflow_name,
+ self.to_workflow_id,
+ self.tags.is_empty(),
+ ) {
+ (Some(workflow_name), None, _) => {
+ tracing::debug!(
+ signal_name=%T::NAME,
+ to_workflow_name=%workflow_name,
+ tags=?self.tags,
+ %signal_id,
+ "dispatching signal via workflow name and tags"
+ );
+
+ let workflow_id = self
+ .db
+ .find_workflow(workflow_name, &serde_json::Value::Object(self.tags))
+ .await?
+ .ok_or(WorkflowError::WorkflowNotFound)
+ .map_err(GlobalError::raw)?;
greptile
logic: potential race condition - workflow could be deleted between find and publish
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple calls happen while changedFiles is null
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple calls occur before changedFiles is populated
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs occur simultaneously due to shared changedFiles state. Consider passing state as parameters instead.
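If state is threaded through parameters as suggested, each run owns its own list and no module-level variable is shared. A minimal sketch under that assumption; names are illustrative:
```ts
// Sketch: resolve the changed files per run and pass them along explicitly.
async function resolveChangedFiles(): Promise<string[]> {
  return [...(await listChangedFiles())];
}

async function runESLint(filePatterns: string[], onlyChanged: boolean, options = {}) {
  const changed = onlyChanged ? await resolveChangedFiles() : null;
  const cli = new CLIEngine(options);
  // ...filter filePatterns against `changed` and format results as before
}
```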
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
greptile
logic: the awaited listChangedFiles() result is spread in the same expression - this throws at runtime if the Promise resolves to a non-iterable, and the shared changedFiles assignment can still race
diff block
+/**
+ * Copyright 2025 Clidey, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+const dbHost = 'localhost';
+const dbUser = 'user';
+const dbPassword = 'password';
+
+describe('MongoDB E2E test', () => {
+ it('should login correctly', () => {
+ // login and setup
+ cy.login('MongoDB', 'localhost', 'user', 'password');
+ cy.selectSchema("test_db");
+
+    // get all collections
+ cy.getTables().then(storageUnitNames => {
+ expect(storageUnitNames).to.be.an('array');
+ expect(storageUnitNames).to.deep.equal([
+ "order_items",
+ "order_summary",
+ "orders",
+ "payments",
+ "products",
+ "system.views",
+ "users",
+ ]);
+ });
+
+ // check users table and fields
+ cy.explore("users");
+ cy.getExploreFields().then(text => {
+ const textLines = text.split("\n");
+
+ const expectedPatterns = [
+ /^users$/,
+ /^Type: Collection$/,
+ /^Storage Size: .+$/, // Ignores actual size value
+ /^Count: .+$/, // Ignores actual count value
+ ];
+ expectedPatterns.forEach(pattern => {
+ expect(textLines.some(line => pattern.test(line))).to.be.true;
+ });
+ });
+
+ // check user default data
+ cy.data("users");
+ cy.sortBy(0);
+
+ const expectedData = [
+ {
+ _id: undefined, // only used to update and not needed
+ email: "john@example.com",
+ password: "securepassword1",
+ username: "john_doe",
+ },
+ {
+ _id: undefined, // only used to update and not needed
+ email: "jane@example.com",
+ password: "securepassword2",
+ username: "jane_smith",
+ },
+ {
+ _id: undefined, // only used to update and not needed
+ email: "admin@example.com",
+ password: "adminpass",
+ username: "admin_user",
+ }
+ ];
+
+ function validateRow(row, expected, expectedIndex) {
+ const [rowIndex, rawJson] = row;
+ const json = JSON.parse(rawJson);
+ if (expectedData[expectedIndex-1]._id == null) {
+ expectedData[expectedIndex-1]._id = json["_id"];
greptile
logic: potential race condition - _id assignment could overwrite previously set values if validateRow is called multiple times on same index
diff block
+import { getPreferenceValues, launchCommand, LaunchType, LocalStorage, showHUD, showToast, Toast } from "@raycast/api";
+import { AudioDevice, getDefaultOutputDevice, setDefaultOutputDevice, setDefaultSystemDevice } from "./audio-device";
+
+export default async () => {
+ const { systemOutput } = getPreferenceValues();
+ const current = await getDefaultOutputDevice();
+
+ // Get last used device from localstorage and parse it
+ const lastUsedDevice = JSON.parse((await LocalStorage.getItem("lastUsedDevice")) || "null") as AudioDevice | null;
+
+ try {
+ // and the name is not same
+ if (lastUsedDevice && lastUsedDevice.id !== current.id) {
+ // Store current device as last used
+ await LocalStorage.setItem("lastUsedDevice", JSON.stringify(current));
+
+ // Switch to last used device
+ await setDefaultOutputDevice(lastUsedDevice.id);
+ if (systemOutput) {
+ await setDefaultSystemDevice(lastUsedDevice.id);
+ }
+ LocalStorage.setItem("lastUsedDevice", JSON.stringify(current));
+ await showHUD(`Active output audio device set to ${lastUsedDevice.name}`);
greptile
logic: Duplicate LocalStorage.setItem call on line 22 could cause race conditions. The second call should be removed since it's redundant.
```suggestion
await LocalStorage.setItem("lastUsedDevice", JSON.stringify(current));
// Switch to last used device
await setDefaultOutputDevice(lastUsedDevice.id);
if (systemOutput) {
await setDefaultSystemDevice(lastUsedDevice.id);
}
await showHUD(`Active output audio device set to ${lastUsedDevice.name}`);
```
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
greptile
logic: Cache initialization happens after awaiting listChangedFiles, which could lead to race conditions if multiple runs occur simultaneously
diff block
MAX_AGE_MINUTES = 15
+def get_cohort_calculation_candidates_queryset() -> QuerySet:
+ return Cohort.objects.filter(
+ Q(last_calculation__lte=timezone.now() - relativedelta(minutes=MAX_AGE_MINUTES))
+ | Q(last_calculation__isnull=True),
+ deleted=False,
+ is_calculating=False,
+ errors_calculating__lte=20,
+ ).exclude(is_static=True)
+
+
def enqueue_cohorts_to_calculate(parallel_count: int) -> None:
"""
Calculates maximum N cohorts in parallel.
Args:
parallel_count: Maximum number of cohorts to calculate in parallel.
"""
-
- # This task will be run every minute
- # Every minute, grab a few cohorts off the list and execute them
-
- # calculate exponential backoff
+ # Exponential backoff, with the first one starting after 30 minutes
backoff_duration = ExpressionWrapper(
timedelta(minutes=30) * (2 ** F("errors_calculating")), # type: ignore
output_field=DurationField(),
)
for cohort in (
- Cohort.objects.filter(
- deleted=False,
- is_calculating=False,
- last_calculation__lte=timezone.now() - relativedelta(minutes=MAX_AGE_MINUTES),
- errors_calculating__lte=20,
- # Exponential backoff, with the first one starting after 30 minutes
- )
+ get_cohort_calculation_candidates_queryset()
.filter(
Q(last_error_at__lte=timezone.now() - backoff_duration) # type: ignore
| Q(last_error_at__isnull=True) # backwards compatability cohorts before last_error_at was introduced
)
- .exclude(is_static=True)
.order_by(F("last_calculation").asc(nulls_first=True))[0:parallel_count]
):
cohort = Cohort.objects.filter(pk=cohort.pk).get()
increment_version_and_enqueue_calculate_cohort(cohort, initiating_user=None)
greptile
logic: Potential race condition between filter and get - consider using select_for_update
diff block
+import { useState, useEffect } from "react";
+import { Form, ActionPanel, Action, showToast, Toast, useNavigation, Icon, Detail } from "@raycast/api";
+import { useFlashcardGenerator } from "./hooks/useFlashcardGenerator";
+import { FlashcardPreview } from "./components/FlashcardPreview";
+import { Flashcard } from "./ai/flashcardGenerator";
+import { AIEnhancer } from "./ai/aiEnhancer";
+import { ErrorHandler } from "./utils/errorHandler";
+import { AnkiRepository } from "./anki/ankiRepository";
+
+// Form for editing a flashcard
+function EditFlashcardForm({ flashcard, onSave }: { flashcard: Flashcard; onSave: (updatedCard: Flashcard) => void }) {
+ const { pop } = useNavigation();
+ const [frontText, setFrontText] = useState(flashcard.front);
+ const [backText, setBackText] = useState(flashcard.back);
+ const [extraText, setExtraText] = useState(flashcard.extra || "");
+ const [imageUrl, setImageUrl] = useState(flashcard.image || "");
+ const [tagsText, setTagsText] = useState(flashcard.tags ? flashcard.tags.join(", ") : "");
+
+ const handleSubmit = () => {
+ const tags = tagsText
+ .split(",")
+ .map((tag) => tag.trim())
+ .filter((tag) => tag);
+ onSave({
+ front: frontText,
+ back: backText,
+ extra: extraText,
+ image: imageUrl || undefined,
+ tags: tags.length > 0 ? tags : undefined,
+ });
+ pop();
+ };
+
+ return (
+ <Form
+ actions={
+ <ActionPanel>
+ <Action.SubmitForm title="Salvar" onSubmit={handleSubmit} />
+ </ActionPanel>
+ }
+ >
+ <Form.TextArea
+ id="front"
+ title="Frente"
+ placeholder="Pergunta ou conceito"
+ value={frontText}
+ onChange={setFrontText}
+ />
+ <Form.TextArea
+ id="back"
+ title="Verso"
+ placeholder="Resposta ou explicação"
+ value={backText}
+ onChange={setBackText}
+ />
+ <Form.TextArea
+ id="extra"
+ title="Informações Extras"
+ placeholder="Informações adicionais, exemplos ou contexto"
+ value={extraText}
+ onChange={setExtraText}
+ />
+ <Form.TextField
+ id="image"
+ title="URL da Imagem (opcional)"
+ placeholder="https://exemplo.com/imagem.jpg"
+ value={imageUrl}
+ onChange={setImageUrl}
+ />
+ <Form.TextField
+ id="tags"
+ title="Tags (opcional)"
+ placeholder="Separe as tags por vírgulas"
+ value={tagsText}
+ onChange={setTagsText}
+ />
+ </Form>
+ );
+}
+
+// Form for configuring the export to Anki
+function ExportToAnkiForm({
+ flashcards,
+ decks,
+ onExport,
+}: {
+ flashcards: Flashcard[];
+ decks: string[];
+ onExport: (flashcards: Flashcard[], deckName: string, modelName: string, tags: string[]) => void;
+}) {
+ const [selectedDeck, setSelectedDeck] = useState(decks[0] || "");
+ const [selectedModel, setSelectedModel] = useState("Raycast Flashcards");
+ const [tagsText, setTagsText] = useState("");
+ const [models, setModels] = useState<string[]>([]);
+ const [modelFields, setModelFields] = useState<string[]>([]);
+ const [isLoading, setIsLoading] = useState(true);
+ const [showPreview, setShowPreview] = useState(false);
+ const { pop } = useNavigation();
+
+ useEffect(() => {
+ loadModels();
+ }, []);
+
+ useEffect(() => {
+ if (selectedModel) {
+ loadModelFields(selectedModel);
+ }
+ }, [selectedModel]);
+
+ const loadModels = async () => {
+ try {
+ const response = await AnkiRepository.modelNames();
+ if (!response.error && response.result) {
+ setModels(response.result);
+ // Adicionar "Raycast Flashcards" à lista se não existir
+ if (!response.result.includes("Raycast Flashcards")) {
+ setModels((prev) => [...prev, "Raycast Flashcards"]);
+ }
+ // Usar "Raycast Flashcards" como padrão
+ setSelectedModel("Raycast Flashcards");
+ }
+ } catch (error) {
+ showToast({
+ style: Toast.Style.Failure,
+ title: "Erro ao carregar modelos",
+ message: "Verifique se o Anki está aberto",
+ });
+ } finally {
+ setIsLoading(false);
+ }
+ };
+
+ const loadModelFields = async (modelName: string) => {
+ try {
+ const response = await AnkiRepository.modelFieldNames(modelName);
+ if (!response.error && response.result) {
+ setModelFields(response.result);
+ }
+ } catch (error) {
+ showToast({
+ style: Toast.Style.Failure,
+ title: "Erro ao carregar campos do modelo",
+ message: "Verifique se o Anki está aberto",
+ });
+ }
+ };
+
+ const handleSubmit = () => {
+ const tags = tagsText
+ .split(",")
+ .map((tag) => tag.trim())
+ .filter((tag) => tag);
+ onExport(flashcards, selectedDeck, selectedModel, tags);
+ pop();
+ };
+
+ const previewFields = () => {
+ if (flashcards.length === 0 || modelFields.length === 0) return null;
+
+ // Pegar o primeiro flashcard como exemplo
+ const example = flashcards[0];
+ return (
+ <Form.Description
+ text={`
+Campos do modelo "${selectedModel}":
+${modelFields
+ .map((field) => {
+ let value = "";
+ const fieldLower = field.toLowerCase();
+ if (
+ fieldLower.includes("front") ||
+ fieldLower.includes("question") ||
+ fieldLower.includes("frente") ||
+ fieldLower.includes("pergunta")
+ ) {
+ value = example.front;
+ } else if (
+ fieldLower.includes("back") ||
+ fieldLower.includes("answer") ||
+ fieldLower.includes("verso") ||
+ fieldLower.includes("resposta")
+ ) {
+ value = example.back;
+ } else if (fieldLower.includes("extra") || fieldLower.includes("note") || fieldLower.includes("nota")) {
+ value = example.extra || "";
+ }
+ return `${field}: ${value.substring(0, 50)}${value.length > 50 ? "..." : ""}`;
+ })
+ .join("\n")}
+ `}
+ />
+ );
+ };
+
+ if (isLoading) {
+ return <Detail isLoading={true} />;
+ }
+
+ if (showPreview) {
+ return (
+ <Detail
+ navigationTitle="Prévia dos Flashcards para Exportação"
+ markdown={`
+# Prévia dos Flashcards para Exportação
+
+## Configurações
+- **Deck**: ${selectedDeck}
+- **Modelo**: ${selectedModel}
+- **Tags**: ${tagsText || "Nenhuma"}
+
+## Flashcards (${flashcards.length})
+${flashcards
+ .map(
+ (card, index) => `
+### Flashcard ${index + 1}
+
+**Frente**: ${card.front}
+
+**Verso**: ${card.back}
+
+${card.extra ? `**Extra**: ${card.extra}` : ""}
+${card.tags && card.tags.length > 0 ? `**Tags**: ${card.tags.join(", ")}` : ""}
+---`,
+ )
+ .join("\n")}
+ `}
+ actions={
+ <ActionPanel>
+ <Action title="Voltar Para Configurações" icon={Icon.ArrowLeft} onAction={() => setShowPreview(false)} />
+ <Action
+ title="Confirmar E Exportar"
+ icon={Icon.Download}
+ onAction={() => {
+ const tags = tagsText
+ .split(",")
+ .map((tag) => tag.trim())
+ .filter((tag) => tag);
+ onExport(flashcards, selectedDeck, selectedModel, tags);
+ }}
+ />
+ </ActionPanel>
+ }
+ />
+ );
+ }
+
+ return (
+ <Form
+ actions={
+ <ActionPanel>
+ <Action.SubmitForm title="Exportar Para Anki" onSubmit={handleSubmit} />
+ <Action title="Visualizar Flashcards" icon={Icon.Eye} onAction={() => setShowPreview(true)} />
+ </ActionPanel>
+ }
+ >
+ <Form.Dropdown id="deck" title="Deck" value={selectedDeck} onChange={setSelectedDeck}>
+ {decks.map((deck) => (
+ <Form.Dropdown.Item key={deck} value={deck} title={deck} />
+ ))}
+ </Form.Dropdown>
+
+ <Form.Dropdown id="model" title="Modelo" value={selectedModel} onChange={setSelectedModel}>
+ {models.map((model) => (
+ <Form.Dropdown.Item key={model} value={model} title={model} />
+ ))}
+ </Form.Dropdown>
+
+ <Form.TextField
+ id="tags"
+ title="Tags (separadas por vírgula)"
+ placeholder="tag1, tag2, tag3"
+ value={tagsText}
+ onChange={setTagsText}
+ />
+
+ {previewFields()}
+ </Form>
+ );
+}
+
+// Main screen for generating flashcards
+export default function GenerateFlashcardCommand() {
+ const { push } = useNavigation();
+ const [text, setText] = useState("");
+ const [language, setLanguage] = useState("português");
+ const [isPreviewMode, setIsPreviewMode] = useState(false);
+ const [topic, setTopic] = useState("");
+ const [selectedFlashcards, setSelectedFlashcards] = useState<Set<number>>(new Set());
+ const [aiModel, setAiModel] = useState("OpenAI_GPT4o");
+
+ const aiModels = [
+ { value: "OpenAI_GPT4o", label: "GPT-4o (Recomendado)" },
+ { value: "OpenAI_GPT4o-mini", label: "GPT-4o Mini (Mais rápido)" },
+ { value: "OpenAI_o1", label: "OpenAI O1 (Especialista em código)" },
+ { value: "Anthropic_Claude_Opus", label: "Claude Opus (Alta complexidade)" },
+ { value: "Anthropic_Claude_Sonnet", label: "Claude Sonnet (Balanceado)" },
+ { value: "Anthropic_Claude_Haiku", label: "Claude Haiku (Mais rápido)" },
+ ];
+
+ const {
+ isLoading,
+ setIsLoading,
+ flashcards,
+ decks,
+ loadDecks,
+ generateFlashcards,
+ addFlashcardsToAnki,
+ editFlashcard,
+ deleteFlashcard,
+ testAnkiConnection,
+ } = useFlashcardGenerator();
+
+ useEffect(() => {
+ loadDecks().catch(() => {
+ ErrorHandler.handleAnkiConnectionError();
+ });
+ }, []);
+
+ useEffect(() => {
+ if (flashcards.length > 0 && selectedFlashcards.size === 0) {
+ const allIndices = new Set(flashcards.map((_, index) => index));
+ setSelectedFlashcards(allIndices);
+ }
+ }, [flashcards]);
+
+ const handleGenerateFlashcards = async () => {
+ if (!text.trim()) {
+ showToast({
+ style: Toast.Style.Failure,
+ title: "Texto vazio",
+ message: "Por favor, insira algum texto para gerar flashcards",
+ });
+ return;
+ }
+
+ try {
+ setIsLoading(true);
+ showToast({
+ style: Toast.Style.Animated,
+ title: "Gerando flashcards...",
+ message: "Isso pode levar alguns segundos",
+ });
+
+ // Extract model name from the selected option
+ const selectedModel = aiModel !== "default" ? aiModel : undefined;
+ console.log(`Gerando flashcards com modelo: ${selectedModel || "padrão"}, idioma: ${language}`);
+
+ const cards = await generateFlashcards(text, language, selectedModel);
+
+ if (cards && cards.length > 0) {
+ setIsPreviewMode(true);
+ showToast({
+ style: Toast.Style.Success,
+ title: `${cards.length} flashcards gerados`,
+ message: "Você pode editar ou exportar para o Anki",
+ });
+ } else {
+ showToast({
+ style: Toast.Style.Failure,
+ title: "Falha ao gerar flashcards",
+ message: "Nenhum flashcard foi gerado. Tente novamente com um texto diferente ou mais detalhado.",
+ });
+ }
+ } catch (error) {
+ console.error("Erro ao gerar flashcards:", error);
+ showToast({
+ style: Toast.Style.Failure,
+ title: "Erro ao gerar flashcards",
+ message: error instanceof Error ? error.message : "Erro desconhecido",
+ });
+ } finally {
+ setIsLoading(false);
+ }
+ };
+
+ const handleEditFlashcard = (index: number, card: Flashcard) => {
+ push(
+ <EditFlashcardForm
+ flashcard={card}
+ onSave={(updatedCard) => {
+ editFlashcard(index, updatedCard);
+ }}
+ />,
+ );
+ };
+
+ const handleExportToAnki = () => {
+ const selectedCards = flashcards.filter((_, idx) => selectedFlashcards.has(idx));
+
+ if (selectedCards.length === 0) {
+ showToast({
+ style: Toast.Style.Failure,
+ title: "Nenhum flashcard selecionado",
+ message: "Selecione pelo menos um flashcard para exportar",
+ });
+ return;
+ }
+
+ testAnkiConnection().then((connected) => {
+ if (connected) {
+ push(
+ <ExportToAnkiForm
+ flashcards={selectedCards}
+ decks={decks}
+ onExport={async (flashcards, deckName, modelName, tags) => {
+ await addFlashcardsToAnki(flashcards, deckName, modelName, tags);
+ }}
+ />,
+ );
+ } else {
+ showToast({
+ style: Toast.Style.Failure,
+ title: "Erro de conexão com o Anki",
+ message: "Verifique se o Anki está aberto e o AnkiConnect está instalado",
+ });
+ }
+ });
+ };
+
+ const toggleSelectFlashcard = (index: number) => {
+ const newSelected = new Set(selectedFlashcards);
+ if (newSelected.has(index)) {
+ newSelected.delete(index);
+ } else {
+ newSelected.add(index);
+ }
+ setSelectedFlashcards(newSelected);
+ };
+
+ if (isPreviewMode && flashcards.length > 0) {
+ return (
+ <FlashcardPreview
+ flashcards={flashcards}
+ onEdit={handleEditFlashcard}
+ onDelete={deleteFlashcard}
+ onSaveToAnki={handleExportToAnki}
+ selectedFlashcards={selectedFlashcards}
+ onToggleSelect={toggleSelectFlashcard}
+ />
+ );
+ }
+
+ return (
+ <Form
+ isLoading={isLoading}
+ actions={
+ <ActionPanel>
+ <Action.SubmitForm title="Gerar Flashcards" onSubmit={handleGenerateFlashcards} />
+ <Action title="Testar Conexão Com Anki" icon={Icon.Link} onAction={testAnkiConnection} />
+ </ActionPanel>
+ }
+ >
+ <Form.TextArea
+ id="text"
+ title="Texto"
+ placeholder="Cole aqui o texto para gerar flashcards..."
+ value={text}
+ onChange={setText}
+ />
+ <Form.Dropdown id="model" title="Modelo de IA" value={aiModel} onChange={setAiModel}>
+ {aiModels.map((model) => (
+ <Form.Dropdown.Item key={model.value} value={model.value} title={model.label} />
+ ))}
+ </Form.Dropdown>
+ <Form.TextField
+ id="topic"
+ title="Tópico (opcional)"
+ placeholder="Ex: História do Brasil, Programação, etc."
+ value={topic}
+ onChange={setTopic}
+ />
+ <Form.Dropdown id="language" title="Idioma" value={language} onChange={setLanguage}>
+ <Form.Dropdown.Item value="português" title="Português" />
+ <Form.Dropdown.Item value="english" title="English" />
+ <Form.Dropdown.Item value="español" title="Español" />
+ </Form.Dropdown>
+ <Form.Description
+ title="Como funciona"
+ text="Cole um texto e a IA irá gerar flashcards automaticamente. Você poderá revisar, editar e exportar para o Anki."
+ />
+ <Form.Separator />
+ {topic && (
+ <ActionPanel>
+ <Action
+ title="Gerar Perguntas Relacionadas"
+ icon={Icon.QuestionMark}
+ onAction={async () => {
+ showToast({ style: Toast.Style.Animated, title: "Gerando perguntas relacionadas..." });
+ const relatedQuestions = await AIEnhancer.generateRelatedQuestions(topic, 5, {
+ model: aiModel !== "default" ? aiModel : undefined,
+ });
+
+ if (relatedQuestions.length === 0) {
+ showToast({
+ style: Toast.Style.Failure,
+ title: "Não foi possível gerar perguntas relacionadas",
+ });
+ return;
+ }
+
+ const newFlashcards: Flashcard[] = relatedQuestions.map((q) => ({
+ front: q.question,
+ back: q.answer,
+ extra: "",
+ }));
+
+ for (const card of newFlashcards) {
+ editFlashcard(flashcards.length, card);
+ }
greptile
style: Modifying flashcards array in a loop can cause race conditions. Consider using a batch update
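A hedged sketch of the batch alternative: build the new cards first, then apply them in one state update. setFlashcards is an assumed underlying setter (the hook shown only exposes editFlashcard):
```ts
// Sketch: one state update for the whole batch instead of a per-card loop.
const relatedCards: Flashcard[] = relatedQuestions.map((q) => ({
  front: q.question,
  back: q.answer,
  extra: "",
}));

setFlashcards((prev) => [...prev, ...relatedCards]);
```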
diff block
// ensure all messages have an ID
newMessages = newMessages.map((m) => (m.id ? m : { ...m, id: uuidv4() }));
- const messagesMap = new Map<string, TMessage>();
- const addMessages = (newMessages: TMessage[]) => {
- if (newMessages.length === 0) return;
- for (const message of newMessages) {
- messagesMap.set(message.id ?? uuidv4(), message);
- }
- setMessages([...messagesMap.values()]);
- };
- addMessages([...messages, ...newMessages]);
+ const accumulator = new LangGraphMessageAccumulator({
+ initialMessages: messages,
+ appendMessage,
+ });
+ setMessages(accumulator.addMessages(newMessages));
greptile
logic: accumulator instance is recreated on every sendMessage call, which could lead to race conditions with concurrent updates
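One hedged way to keep a single accumulator across calls is to stash it in a ref so every sendMessage appends into the same instance; this assumes a React hook context where useRef is available:
```ts
// Sketch: create the accumulator once and reuse it for subsequent sends.
const accumulatorRef = useRef<LangGraphMessageAccumulator | null>(null);

if (accumulatorRef.current === null) {
  // initialMessages is captured only on first creation.
  accumulatorRef.current = new LangGraphMessageAccumulator({
    initialMessages: messages,
    appendMessage,
  });
}

setMessages(accumulatorRef.current.addMessages(newMessages));
```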
diff block
updaterDelegate: nil,
userDriverDelegate: nil)
- var remoteModels: RemoteModelsState
var remoteFetchFailed: Bool = false
var localFetchFailed: Bool = false
+ var account: Account? {
+ didSet {
+ if account == nil {
+ subscription = nil
+ } else {
+ Task {
+ subscription = try? await FetchingClient().getSubscription()
+ }
+ }
greptile
logic: The Task is not cancelled when account is set to nil, which could cause race conditions if the account changes rapidly
diff block
import { buttonVariants } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { Search, Upload } from "lucide-react";
-import { I18nSelector } from "@/components/ui/i18n-selector";
-import { useStore } from "@nanostores/react";
-import { searchQuery } from "@/stores/search";
import { cn } from "@/lib/utils";
+import { searchQuery } from "@/stores/search";
+import { useCallback, useRef, useState } from "react";
+import { debounce } from "lodash-es";
export function Header() {
- const query = useStore(searchQuery);
+ const debouncedSearch = useRef(
+ debounce((value: string) => {
+ searchQuery.set({
+ value,
+ page: 1,
+ });
+ }, 300),
+ ).current;
+
+ const [search, setSearch] = useState("");
- const handleSearch = (event: React.ChangeEvent<HTMLInputElement>) => {
- const value = event.target.value;
- searchQuery.set(value);
- };
+ const handleSearch = useCallback(
+ (event: React.ChangeEvent<HTMLInputElement>) => {
+ const value = event.target.value;
+ setSearch(value);
+
+ debouncedSearch.cancel();
+ debouncedSearch(value);
greptile
style: Calling cancel() before every debounce invocation may cause race conditions. Consider using useEffect cleanup instead.
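A hedged sketch of the cleanup approach: cancel only on unmount, so a pending update is not dropped between keystrokes:
```ts
// Sketch: cancel the pending debounce when the component unmounts,
// not before every invocation.
useEffect(() => {
  return () => {
    debouncedSearch.cancel();
  };
}, [debouncedSearch]);

const handleSearch = useCallback(
  (event: React.ChangeEvent<HTMLInputElement>) => {
    setSearch(event.target.value);
    debouncedSearch(event.target.value);
  },
  [debouncedSearch]
);
```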
diff block
import { Command } from 'commander';
+import fs, { existsSync } from 'fs';
+import path from 'path';
+import prompts from 'prompts';
+
+const COMPONENTS = ['checkbox', 'radio-group', 'radio-item', 'button', 'icon'];
export const add = new Command()
.command('add')
.description('Add a new component')
- .action(() => {
- console.log('Adding a new component');
+ .option('-a, --all', 'add all components')
+ .action(async (options) => {
+ try {
+ let selectComponents: string[] = [];
+ if (options.all) {
+ selectComponents = [...COMPONENTS];
+ console.log(`adding all components: ${selectComponents.join(', ')}`);
+ } else {
+ const { components } = await prompts({
+ type: 'multiselect',
+ name: 'components',
+ message: 'select the components you want to install:',
+ choices: COMPONENTS.map((component) => ({
+ title: component,
+ value: component,
+ })),
+ validate: (components) =>
+ components.length > 0 ? true : 'please select at least one component from the list',
+ });
+
+ selectComponents = components || [];
+
+ if (selectComponents.length === 0) {
+ console.log('no components selected.');
+ return;
+ }
+ }
+
+ const { installPath } = await prompts({
+ type: 'text',
+ name: 'installPath',
+ initial: './',
+ message: 'please enter the path where the components should be installed:',
+ validate: (input) => (input.trim().length > 0 ? true : 'please enter a valid path'),
+ });
+
+ if (!installPath) {
+ console.log('no valid path inputted.');
+ return;
+ }
+
+ if (!existsSync(installPath)) {
+ console.error(`ERROR: invalid path ${installPath} does not exist.`);
+ return;
+ }
+
+ const validComponents: string[] = [];
+ const invalidComponents: string[] = [];
+
+ selectComponents.forEach((component) => {
+ if (COMPONENTS.includes(component)) {
+ validComponents.push(component);
+ } else {
+ invalidComponents.push(component);
+ }
+ });
+
+ if (validComponents.length === 0) {
+ console.error('ERROR: no valid components have been selected.');
+ return;
+ }
+
+ for (const component of validComponents) {
+ const folderName = `Bog${component
+ .split('-')
+ .map((part) => part.charAt(0).toUpperCase() + part.slice(1))
+ .join('')}`;
+ const destPath = path.join(installPath, folderName);
+
+ if (!fs.existsSync(destPath)) {
+ fs.mkdirSync(destPath, { recursive: true });
+ }
greptile
style: Race condition possible between existsSync check and mkdirSync. Use try/catch instead of checking existence.
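A minimal sketch of the try/catch form; mkdirSync with recursive: true is a no-op when the directory already exists, so the existence check can be dropped entirely:
```ts
// Sketch: let the filesystem arbitrate instead of racing existsSync/mkdirSync.
try {
  fs.mkdirSync(destPath, { recursive: true }); // succeeds even if it already exists
} catch (error) {
  console.error(`ERROR: could not create ${destPath}:`, error);
  continue; // skip this component, keep installing the rest
}
```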
diff block
+import { Action, ActionPanel, getSelectedFinderItems, Grid, Icon, showToast, Toast } from "@raycast/api";
+import { useEffect, useState } from "react";
+import { extractColor } from "swift:../swift/extract-color";
+
+export interface FinalColor {
+ hex: string;
+ red: number;
+ green: number;
+ blue: number;
+ area: number;
+ hue: number;
+ saturation: number;
+ lightness: number;
+ intensity: number;
+}
+
+export default function Command() {
+ const [columns, setColumns] = useState(3);
+ const [isLoading, setIsLoading] = useState(true);
+ const [colors, setColors] = useState<FinalColor[]>([]);
+ const [info, setInfo] = useState<{
+ title: string;
+ description: string;
+ }>({
+ title: "No image found",
+ description: "Select an image from finder to extract colors",
+ });
+
+ async function loadColors() {
+ const toast = await showToast({
+ style: Toast.Style.Animated,
+ title: "Extracting colors",
+ });
+ let path: string | undefined;
+
+ try {
+ const items = await getSelectedFinderItems();
+ if (items.length === 0) {
+ setIsLoading(false);
+ }
+ path = items[0].path;
+ } catch {
greptile
logic: Setting isLoading=false when no items are selected does not stop execution, so items[0].path is still accessed - return early after clearing the loading state
suggested fix
if (items.length === 0) {
setIsLoading(false);
+ return;
}
path = items[0].path;
} catch {
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs occur simultaneously due to shared changedFiles state. Consider passing state through parameters instead.
diff block
convert(themes.light).color.warning, // INCOMPLETION,
];
-export const A11yContext = React.createContext<A11yContextStore>({
+export const A11yContext = createContext<A11yContextStore>({
results: {
passes: [],
incomplete: [],
violations: [],
},
- setResults: () => {},
highlighted: [],
toggleHighlight: () => {},
clearHighlights: () => {},
tab: 0,
setTab: () => {},
+ setStatus: () => {},
+ status: 'initial',
+ error: undefined,
+ handleManual: () => {},
});
-interface A11yContextProviderProps {
- active: boolean;
-}
-
const defaultResult = {
passes: [],
incomplete: [],
violations: [],
};
-export const A11yContextProvider: React.FC<React.PropsWithChildren<A11yContextProviderProps>> = ({
- active,
- ...props
-}) => {
+type Status = 'initial' | 'manual' | 'running' | 'error' | 'ran' | 'ready';
+
+export const A11yContextProvider: FC<PropsWithChildren> = (props) => {
+ const parameters = useParameter<A11yParameters>('a11y', {
+ manual: false,
+ });
+
+ const getInitialStatus = useCallback((manual = false) => (manual ? 'manual' : 'initial'), []);
+
const [results, setResults] = useAddonState<Results>(ADDON_ID, defaultResult);
- const [tab, setTab] = React.useState(0);
- const [highlighted, setHighlighted] = React.useState<string[]>([]);
- const api = useStorybookApi();
- const storyEntry = api.getCurrentStoryData();
+ const [tab, setTab] = useState(0);
+ const [error, setError] = React.useState<unknown>(undefined);
+ const [status, setStatus] = useState<Status>(getInitialStatus(parameters.manual!));
+ const [highlighted, setHighlighted] = useState<string[]>([]);
+ const { storyId } = useStorybookState();
- const handleToggleHighlight = React.useCallback((target: string[], highlight: boolean) => {
+ const handleToggleHighlight = useCallback((target: string[], highlight: boolean) => {
setHighlighted((prevHighlighted) =>
highlight
? [...prevHighlighted, ...target]
: prevHighlighted.filter((t) => !target.includes(t))
);
}, []);
- const handleRun = (renderedStoryId: string) => {
- emit(EVENTS.REQUEST, renderedStoryId, api.getParameters(renderedStoryId, 'a11y'));
- };
- const handleClearHighlights = React.useCallback(() => setHighlighted([]), []);
- const handleSetTab = React.useCallback((index: number) => {
- handleClearHighlights();
- setTab(index);
- }, []);
- const handleReset = React.useCallback(() => {
- setTab(0);
- setResults(defaultResult);
- // Highlights is cleared by addon-highlight
+ const handleClearHighlights = useCallback(() => setHighlighted([]), []);
+
+ const handleSetTab = useCallback(
+ (index: number) => {
+ handleClearHighlights();
+ setTab(index);
+ },
+ [handleClearHighlights]
+ );
+
+ const handleError = useCallback((err: unknown) => {
+ setStatus('error');
+ setError(err);
}, []);
- const emit = useChannel({
- [STORY_RENDERED]: handleRun,
- [STORY_CHANGED]: handleReset,
- });
+ const handleResult = useCallback(
+ (axeResults: AxeResults, id: string) => {
+ if (storyId === id) {
+ setStatus('ran');
+ setResults(axeResults);
+
+ setTimeout(() => {
+ if (status === 'ran') {
+ setStatus('ready');
+ }
+ }, 900);
greptile
logic: potential race condition - status in closure may be stale since it's not in the dependency array
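A hedged fix sketch: read the latest value through a functional update instead of the captured one, which sidesteps the dependency-array problem:
```ts
// Sketch: the updater sees the current status, not the value closed over
// when handleResult was created.
setTimeout(() => {
  setStatus((current) => (current === 'ran' ? 'ready' : current));
}, 900);
```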
diff block
const { document } = global;
const channel = addons.getChannel();
-// Holds axe core running state
-let active = false;
-// Holds latest story we requested a run
-let activeStoryId: string | undefined;
const defaultParameters = { config: {}, options: {} };
-/** Handle A11yContext events. Because the event are sent without manual check, we split calls */
-const handleRequest = async (storyId: string, input: A11yParameters | null) => {
- if (!input?.manual) {
- await run(storyId, input ?? defaultParameters);
- }
-};
-
-const run = async (storyId: string, input: A11yParameters = defaultParameters) => {
- activeStoryId = storyId;
- try {
- if (!active) {
- active = true;
- channel.emit(EVENTS.RUNNING);
- const { default: axe } = await import('axe-core');
-
- const { element = '#storybook-root', config, options = {} } = input;
- const htmlElement = document.querySelector(element as string);
+export const run = async (input: A11yParameters = defaultParameters) => {
+ const { default: axe } = await import('axe-core');
- if (!htmlElement) {
- return;
- }
+ const { element = '#storybook-root', config, options = {} } = input;
+ const htmlElement = document.querySelector(element as string);
- axe.reset();
- if (config) {
- axe.configure(config);
- }
+ if (!htmlElement) {
+ return;
+ }
- const result = await axe.run(htmlElement, options);
+ axe.reset();
+ if (config) {
+ axe.configure(config);
+ }
- // Axe result contains class instances, which telejson deserializes in a
- // way that violates:
- // Content Security Policy directive: "script-src 'self' 'unsafe-inline'".
- const resultJson = JSON.parse(JSON.stringify(result));
+ return axe.run(htmlElement, options);
+};
- // It's possible that we requested a new run on a different story.
- // Unfortunately, axe doesn't support a cancel method to abort current run.
- // We check if the story we run against is still the current one,
- // if not, trigger a new run using the current story
- if (activeStoryId === storyId) {
- channel.emit(EVENTS.RESULT, resultJson);
- } else {
- active = false;
- run(activeStoryId);
- }
- }
+channel.on(EVENTS.MANUAL, async (storyId: string, input: A11yParameters = defaultParameters) => {
+ try {
+ const result = await run(input);
+ // Axe result contains class instances, which telejson deserializes in a
+ // way that violates:
+ // Content Security Policy directive: "script-src 'self' 'unsafe-inline'".
+ const resultJson = JSON.parse(JSON.stringify(result));
+ channel.emit(EVENTS.RESULT, resultJson, storyId);
} catch (error) {
channel.emit(EVENTS.ERROR, error);
- } finally {
- active = false;
}
-};
-
-channel.on(EVENTS.REQUEST, handleRequest);
-channel.on(EVENTS.MANUAL, run);
+});
greptile
logic: The removal of the active state tracking means multiple accessibility scans could now run concurrently, potentially causing race conditions in the results. Consider re-adding a mechanism to prevent or handle concurrent scans.
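A minimal sketch of such a guard, mirroring the names in the snippet; a real implementation might queue the skipped request instead of dropping it:
```ts
// Sketch: single-flight guard so overlapping axe runs are not started.
let running = false;

channel.on(EVENTS.MANUAL, async (storyId: string, input: A11yParameters = defaultParameters) => {
  if (running) {
    return; // or queue the request for after the current run finishes
  }
  running = true;
  try {
    const result = await run(input);
    channel.emit(EVENTS.RESULT, JSON.parse(JSON.stringify(result)), storyId);
  } catch (error) {
    channel.emit(EVENTS.ERROR, error);
  } finally {
    running = false;
  }
});
```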
diff block
+import { ActionPanel, Form, Action, showToast, Toast, popToRoot, closeMainWindow } from "@raycast/api";
+import fs from "fs";
+import { resetFolderIcon } from "./utils/resetFolderIcon";
+
+export default function Command() {
+ return (
+ <Form
+ actions={
+ <ActionPanel>
+ <Action.SubmitForm
+ title="Reset Folder Icon"
+ onSubmit={async (values: { folder: string[]; image: string[] }) => {
+ if (values.folder.length === 0) {
+ showToast({
+ style: Toast.Style.Failure,
+ title: "Please select a folder.",
+ });
+ return;
+ }
+ const folder = values.folder[0];
+ if (!fs.existsSync(folder) || !fs.lstatSync(folder).isDirectory()) {
greptile
style: Multiple fs operations on same path could lead to race conditions. Consider caching lstatSync result
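A hedged sketch of the single-stat variant: stat once, inside a try/catch, and reuse the result; the toast message is illustrative:
```ts
// Sketch: one lstat call, guarded, instead of existsSync + lstatSync.
let stats: fs.Stats | null = null;
try {
  stats = fs.lstatSync(folder);
} catch {
  stats = null; // path vanished or was never there
}
if (!stats || !stats.isDirectory()) {
  showToast({ style: Toast.Style.Failure, title: "Please select a valid folder." });
  return;
}
```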
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
greptile
logic: Global 'eslintCache' may cause race conditions if run concurrently. Consider isolating cache per run.
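If the cache is isolated per run as suggested, concurrent runs stop sharing mutable state. A minimal sketch under that assumption:
```ts
// Sketch: the cache lives inside the function, so each run gets a fresh map.
async function runESLintOnFilesWithOptions(filePatterns: string[], onlyChanged: boolean, options = {}) {
  const eslintCache = new Map(); // scoped to this invocation only
  // ...the rest of the run reads and writes this local cache
}
```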
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple calls occur while changedFiles is being populated. Consider using a lock or promise queue.
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
greptile
logic: Race condition possible here - changedFiles could be modified between null check and assignment
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Potential race condition if multiple runs occur simultaneously due to shared changedFiles state.
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
greptile
logic: Race condition possible here - changedFiles could be modified between null check and assignment
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs occur simultaneously due to shared changedFiles state
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs occur simultaneously due to shared changedFiles state. Consider passing state as parameters instead.
diff block
} as CoreSystemMessage;
messages = [systemMessage, ...messages];
}
- const response = await fetch(
- `${import.meta.env.VITE_SUPABASE_API_URL}${FUNCTIONS_ROUTE}${BASE_API_ROUTE}${ApiRoutes.AI_V2}`,
- {
- method: 'POST',
- headers: {
- 'Content-Type': 'application/json',
- Authorization: `Bearer ${authTokens.accessToken}`,
- },
- body: JSON.stringify({
- messages,
- useAnalytics: this.useAnalytics,
- requestType,
- } satisfies StreamRequestV2),
- signal: this.abortController.signal,
- },
- );
-
- if (response.status !== 200) {
- if (response.status === 403) {
- return {
- status: 'rate-limited',
- content: 'You have reached your daily limit.',
- rateLimitResult: await response.json(),
- };
- }
- const errorMessage = await response.text();
- throw new Error(errorMessage);
- }
- const reader = response.body?.getReader();
- if (!reader) {
- throw new Error('No response from server');
+ // Connect to WebSocket
+ const wsApi = import.meta.env.VITE_SUPABASE_API_URL?.replace('https://', 'wss://').replace('http://', 'ws://');
+ if (!wsApi) {
+ throw new Error('WebSocket API URL not found');
}
+ console.log('WebSocket API URL', wsApi);
+ const wsUrl = `${wsApi}${FUNCTIONS_ROUTE}${BASE_API_ROUTE}${ApiRoutes.AI_WS}`;
+ console.log('WebSocket URL', wsUrl);
+ this.ws = new WebSocket(wsUrl);
- let fullContent = '';
- while (true) {
- const { done, value } = await reader.read();
- if (done) {
- break;
- }
+ return new Promise((resolve, reject) => {
+ let fullContent = '';
+
+ this.ws!.onopen = () => {
+ // Send the request once connected
+ this.ws!.send(JSON.stringify({
+ messages,
+ useAnalytics: this.useAnalytics,
+ requestType,
+ authToken: authTokens.accessToken,
+ }));
+ };
greptile
logic: potential race condition if WebSocket connection is closed before onopen handler executes
suggested fix
this.ws!.onopen = () => {
// Send the request once connected
+ if (this.ws?.readyState === WebSocket.OPEN) {
+ this.ws.send(JSON.stringify({
messages,
useAnalytics: this.useAnalytics,
requestType,
authToken: authTokens.accessToken,
}));
+ }
};
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
greptile
logic: Race condition possible here - changedFiles could be modified by another async operation between null check and assignment
diff block
loadAlerts()
push(urls.insightAlerts(insight.short_id as InsightShortId))
}}
+ insightLogicProps={insightLogicProps}
/>
greptile
logic: loadAlerts() is called before the URL change, which could lead to a race condition if the alerts haven't finished loading before the navigation completes. Consider using async/await or Promise chaining.
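A minimal sketch of the ordering fix this comment points at: await the load before navigating. The loadAlerts, navigate, and alertsUrl names here are hypothetical stand-ins for the real calls in the diff:

async function openAlerts(
  loadAlerts: () => Promise<void>,
  navigate: (url: string) => void,
  alertsUrl: string
): Promise<void> {
  // Wait for the alerts to finish loading before the URL changes,
  // so the destination never renders against half-loaded state.
  await loadAlerts();
  navigate(alertsUrl);
}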
diff block
return Ok(models::AuthIdentityCompleteEmailVerificationResponse { status });
}
- let email_res = op!([ctx] user_resolve_email {
+ let email_res = ctx.op(::user::ops::resolve_email::Input {
emails: vec![res.email.clone()],
})
.await?;
greptile
logic: ensure that email_res.users is properly ordered to prevent potential race conditions when multiple users have the same email
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
greptile
logic: Cache initialization happens after awaiting listChangedFiles, which could lead to race conditions if multiple runs occur simultaneously
diff block
const runSQL = useMemoizedFn(
async ({
- datasetId,
-
+ dataSourceId,
sql,
- messageId,
- threadId
+ metricId
}: {
- messageId?: string;
- datasetId: string;
- threadId?: string;
+ dataSourceId: string;
+ metricId?: string;
sql: string;
}) => {
- return new Promise<RunSQLResponse>((resolve, reject) => {
- busterSocket.emitAndOnce({
- emitEvent: {
- route: '/sql/run',
- payload: {
- dataset_id: datasetId,
- sql
- }
- },
- responseEvent: {
- route: '/sql/run:runSql',
- callback: (d) => {
- const res = _onResponseRunSQL(d, sql, { messageId, threadId });
- resolve(res);
- },
- onError: reject
- }
+ try {
+ const result = await runSQLRest({
+ data_source_id: dataSourceId,
+ sql
});
- });
- }
- );
- const resetRunSQLData = useMemoizedFn(
- ({ messageId, threadId }: { messageId: string; threadId: string }) => {
- setWarnBeforeNavigating(false);
+ _onResponseRunSQL(result, sql, { metricId });
- if (!originalConfigs.current[messageId]) return;
- const oldConfig = originalConfigs.current[messageId]?.chartConfig;
- onUpdateThreadMessage({
- threadId,
- messageId,
- message: {
- chart_config: oldConfig
- }
- });
- onSetMessageData({
- messageId,
- data: originalConfigs.current[messageId]?.data!,
- data_metadata: originalConfigs.current[messageId]?.dataMetadata!,
- code: originalConfigs.current[messageId]?.code!,
- isDataFromRerun: false
- });
- delete originalConfigs.current[messageId];
+ return result;
+ } catch (error) {
+ //
+ }
}
);
+ const resetRunSQLData = useMemoizedFn(({ metricId }: { metricId: string }) => {
+ setWarnBeforeNavigating(false);
+
+ if (!originalConfigs.current[metricId]) return;
+ const oldConfig = originalConfigs.current[metricId]?.chartConfig;
+ onUpdateMetric({
+ id: metricId,
+ chart_config: oldConfig
+ });
+ onSetDataForMetric({
+ metricId,
+ data: originalConfigs.current[metricId]?.data!,
+ data_metadata: originalConfigs.current[metricId]?.dataMetadata!,
+ code: originalConfigs.current[metricId]?.code!,
+ isDataFromRerun: false
+ });
+ delete originalConfigs.current[metricId];
+ });
+
const saveSQL = useMemoizedFn(
- async ({ messageId, threadId, sql }: { messageId: string; threadId: string; sql: string }) => {
- const ogConfigs = originalConfigs.current[messageId];
- const currentMessage = getThreadMessage({ threadId, messageId });
- const datasetId = currentMessage?.dataset_id!;
+ async ({
+ metricId,
+ sql,
+ dataSourceId: dataSourceIdProp
+ }: {
+ metricId: string;
+ sql: string;
+ dataSourceId?: string;
+ }) => {
+ const ogConfigs = originalConfigs.current[metricId];
+ const currentMetric = getMetricMemoized({ metricId });
+ const dataSourceId = dataSourceIdProp || currentMetric?.data_source_id;
- if (!ogConfigs || ogConfigs.code !== sql) {
+ if ((!ogConfigs || ogConfigs.code !== sql) && dataSourceId) {
greptile
logic: Potential race condition if dataSourceId is undefined but ogConfigs/code check passes
diff block
throw new Error('HogFunctionManagerService is not ready! Run HogFunctionManagerService.start() before this')
}
- return this.cache.functions[id]
+ return this.hogFunctions[id]
}
public teamHasHogDestinations(teamId: Team['id']): boolean {
return !!Object.keys(this.getTeamHogFunctions(teamId)).length
}
+ /**
+ * Reloads all hog functions that have updates since the last full reload.
+ * If we have never loaded then we only load enabled hog functions
+ * Otherwise we load all hog functions that have been updated so we can also remove
+ */
public async reloadAllHogFunctions(): Promise<void> {
const items = (
- await this.hub.postgres.query<HogFunctionType>(
- PostgresUse.COMMON_READ,
- `
- SELECT ${HOG_FUNCTION_FIELDS.join(', ')}
- FROM posthog_hogfunction
- WHERE deleted = FALSE AND enabled = TRUE AND type = ANY($1)
- ORDER BY execution_order NULLS LAST, created_at ASC
- `,
- [this.hogTypes],
- 'fetchAllHogFunctions'
- )
+ this.lastUpdatedAt
+ ? // If we have the latest updated at timestamp for a hog function then we load all updated hog functions
+ // Whether deleted/enabled or not
+ await this.hub.postgres.query<HogFunctionType>(
+ PostgresUse.COMMON_READ,
+ `SELECT ${HOG_FUNCTION_FIELDS.join(', ')} FROM posthog_hogfunction WHERE type = ANY($1)
+ AND updated_at > $2
+ ORDER BY updated_at ASC`,
+ [this.hogTypes, this.lastUpdatedAt],
+ 'fetchUpdatedHogFunctions'
+ )
+ : // Otherwise just load all enabled functions
+ await this.hub.postgres.query<HogFunctionType>(
+ PostgresUse.COMMON_READ,
+ `SELECT ${HOG_FUNCTION_FIELDS.join(', ')} FROM posthog_hogfunction WHERE type = ANY($1)
+ AND deleted = FALSE AND enabled = TRUE
+ ORDER BY updated_at ASC`,
+ [this.hogTypes],
+ 'fetchAllHogFunctions'
+ )
).rows
greptile
logic: Race condition possible here - if items are updated between the two queries, some updates could be missed. Consider using a transaction or timestamp-based windowing.
diff block
+import juno from "juno-sdk";
+
+let initialized = false;
greptile
style: Using a module-level variable for initialization state could cause race conditions in certain environments. Consider using a more robust singleton pattern.
diff block
stripeSubscriptionItemId,
quantity,
startDate,
+ endDate,
}: {
orgId: string;
stripeSubscriptionItemId: string;
quantity: number;
startDate: Date;
+ endDate: Date;
}): Promise<Result<string, string>> {
try {
const usageRecord = await this.stripeClient.addUsageRecord({
subscriptionItemId: stripeSubscriptionItemId,
quantity: quantity,
timestamp: "now",
greptile
logic: Using 'now' as the timestamp while also using the 'set' action could lead to race conditions between multiple cron runs. Consider using endDate.getTime()/1000.
diff block
],
textTiles: [(s) => [s.tiles], (tiles) => tiles.filter((t) => !!t.text)],
itemsLoading: [
- (s) => [s._dashboardLoading, s.refreshStatus],
- (dashboardLoading, refreshStatus) => {
- return dashboardLoading || Object.values(refreshStatus).some((s) => s.loading || s.queued)
+ (s) => [s._dashboardLoading, s.refreshStatus, s.initialVariablesLoaded],
+ (dashboardLoading, refreshStatus, initialVariablesLoaded) => {
+ return (
+ dashboardLoading ||
+ Object.values(refreshStatus).some((s) => s.loading || s.queued) ||
+ (QUERY_VARIABLES_KEY in router.values.searchParams && !initialVariablesLoaded)
+ )
},
greptile
logic: Race condition possible here - itemsLoading could return false before variables are actually applied if initialVariablesLoaded is set to true too early
diff block
return response.Response({"descendants": descendants})
+ @action(methods=["POST"], detail=True)
+ def cancel(self, request: request.Request, *args, **kwargs) -> response.Response:
+ """Cancel a running saved query workflow."""
+ saved_query = self.get_object()
+
+ if saved_query.status != DataWarehouseSavedQuery.Status.RUNNING:
+ return response.Response(
+ {"error": "Cannot cancel a query that is not running"}, status=status.HTTP_400_BAD_REQUEST
+ )
+
+ temporal = sync_connect()
+ workflow_id = f"data-modeling-run-{saved_query.id.hex}"
+
+ try:
+ # Ad-hoc handling
+ workflow_handle = temporal.get_workflow_handle(workflow_id)
+ if workflow_handle:
+ async_to_sync(workflow_handle.cancel)()
+ DataModelingJob.objects.filter(
+ saved_query_id=saved_query.id, status=DataModelingJob.Status.RUNNING, workflow_id=workflow_id
+ ).update(status=DataModelingJob.Status.CANCELLED, last_run_at=timezone.now())
greptile
logic: Race condition possible between checking workflow_handle and updating job status. Consider using select_for_update() or moving the status update after successful cancellation
diff block
-import React, { useState, useCallback } from 'react';
+import React, { useState } from 'react';
import './App.css';
import TodoForm from './components/TodoForm';
-import TodoItem from './components/TodoItem';
function App() {
const [todos, setTodos] = useState([]);
const [idCounter, setIdCounter] = useState(0);
- const addTodo = useCallback((text) => {
- setIdCounter(prev => prev + 1);
- setTodos(prev => [...prev, {
+ const addTodo = (text) => {
+ setIdCounter(idCounter + 1);
+ const newTodo = {
id: idCounter,
- text,
+ text: text,
completed: false,
priority: 'low'
- }]);
- }, [idCounter]);
+ };
+ setTodos([...todos, newTodo]);
greptile
logic: Race condition possible here. setIdCounter and setTodos should use the prev => pattern to ensure state updates are based on latest values
suggested fix
+ setIdCounter(prev => prev + 1);
const newTodo = {
+ id: idCounter,
text: text,
+ completed: false,
+ priority: 'low'
};
+ setTodos(prev => [...prev, newTodo]);
diff block
+package org.owasp.webgoat.container.session;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * This class is responsible for managing user session data within a lesson. It uses a HashMap to
+ * store key-value pairs representing session data.
+ */
+public class LessonSession {
+
+ private Map<String, Object> userSessionData = new HashMap<>();
+
+ /** Default constructor initializing an empty session. */
+ public LessonSession() {}
+
+ /**
+ * Retrieves the value associated with the given key.
+ *
+ * @param key the key for the session data
+ * @return the value associated with the key, or null if the key does not exist
+ */
+ public Object getValue(String key) {
+ if (!userSessionData.containsKey(key)) {
+ return null;
+ }
+ // else
+ return userSessionData.get(key);
+ }
greptile
logic: The containsKey() check followed by get() creates a race condition. Consider using a single get() call and checking for null, or using computeIfPresent() for atomic operations
suggested fix
public Object getValue(String key) {
  return userSessionData.get(key);
}
diff block
+/*
+ * This file is part of WebGoat, an Open Web Application Security Project utility. For details, please see http://www.owasp.org/
+ *
+ * Copyright (c) 2002 - 2019 Bruce Mayhew
+ *
+ * This program is free software; you can redistribute it and/or modify it under the terms of the
+ * GNU General Public License as published by the Free Software Foundation; either version 2 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
+ * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along with this program; if
+ * not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
+ * 02111-1307, USA.
+ *
+ * Getting Source ==============
+ *
+ * Source for this application is maintained at https://github.com/WebGoat/WebGoat, a repository for free software projects.
+ */
+
+package org.owasp.webgoat.lessons.clientsidefiltering;
+
+import jakarta.annotation.PostConstruct;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpressionException;
+import javax.xml.xpath.XPathFactory;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.core.io.ClassPathResource;
+import org.springframework.util.FileCopyUtils;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.ResponseBody;
+import org.springframework.web.bind.annotation.RestController;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+@RestController
+@Slf4j
+public class Salaries {
+
+ @Value("${webgoat.user.directory}")
+ private String webGoatHomeDirectory;
+
+ @PostConstruct
+ public void copyFiles() {
+ ClassPathResource classPathResource = new ClassPathResource("lessons/employees.xml");
+ File targetDirectory = new File(webGoatHomeDirectory, "/ClientSideFiltering");
+ if (!targetDirectory.exists()) {
+ targetDirectory.mkdir();
+ }
greptile
logic: Directory creation is not atomic and vulnerable to TOCTOU race conditions. Use `mkdirs()` with proper permission checks.
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
greptile
logic: Cache initialization happens after awaiting listChangedFiles, which could lead to race conditions if multiple runs happen simultaneously
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple calls happen while changedFiles is being populated. Consider adding a lock or making the cache operation atomic.
diff block
+import React, { useMemo, useState } from 'react'
+import { ScrollView, StyleProp, ViewStyle } from 'react-native'
+
+import { getDisplayOrderQuestions, SurveyAppearanceTheme } from '../surveys-utils'
+import { Survey, SurveyAppearance, SurveyQuestion } from '../../../../posthog-core/src/posthog-surveys-types'
+import { LinkQuestion, MultipleChoiceQuestion, OpenTextQuestion, RatingQuestion } from './QuestionTypes'
+import { PostHog } from '../../posthog-rn'
+import { usePostHog } from '../../hooks/usePostHog'
+
+const getSurveyInteractionProperty = (survey: Survey, action: string): string => {
+ let surveyProperty = `$survey_${action}/${survey.id}`
+ if (survey.current_iteration && survey.current_iteration > 0) {
+ surveyProperty = `$survey_${action}/${survey.id}/${survey.current_iteration}`
+ }
+
+ return surveyProperty
+}
+
+export const sendSurveyShownEvent = (survey: Survey, posthog: PostHog): void => {
+ posthog.capture('survey shown', {
+ $survey_name: survey.name,
+ $survey_id: survey.id,
+ $survey_iteration: survey.current_iteration,
+ $survey_iteration_start_date: survey.current_iteration_start_date,
+ })
+}
+
+export const sendSurveyEvent = (
+ responses: Record<string, string | number | string[] | null> = {},
+ survey: Survey,
+ posthog: PostHog
+): void => {
+ posthog.capture('survey sent', {
+ $survey_name: survey.name,
+ $survey_id: survey.id,
+ $survey_iteration: survey.current_iteration,
+ $survey_iteration_start_date: survey.current_iteration_start_date,
+ $survey_questions: survey.questions.map((question) => question.question),
+ ...responses,
+ $set: {
+ [getSurveyInteractionProperty(survey, 'responded')]: true,
+ },
+ })
+}
+
+export const dismissedSurveyEvent = (survey: Survey, posthog: PostHog): void => {
+ posthog.capture('survey dismissed', {
+ $survey_name: survey.name,
+ $survey_id: survey.id,
+ $survey_iteration: survey.current_iteration,
+ $survey_iteration_start_date: survey.current_iteration_start_date,
+ $set: {
+ [getSurveyInteractionProperty(survey, 'dismissed')]: true,
+ },
+ })
+}
+
+export function Questions({
+ survey,
+ appearance,
+ styleOverrides,
+ onSubmit,
+}: {
+ survey: Survey
+ appearance: SurveyAppearanceTheme
+ styleOverrides?: StyleProp<ViewStyle>
+ onSubmit: () => void
+}): JSX.Element {
+ const [questionsResponses, setQuestionsResponses] = useState({})
+ const [currentQuestionIndex, setCurrentQuestionIndex] = useState(0)
+ const surveyQuestions = useMemo(() => getDisplayOrderQuestions(survey), [survey])
+ const posthog = usePostHog()
+
+ const onNextButtonClick = ({
+ res,
+ originalQuestionIndex,
+ displayQuestionIndex,
+ }: {
+ res: string | string[] | number | null
+ originalQuestionIndex: number
+ displayQuestionIndex: number
+ }): void => {
+ const responseKey = originalQuestionIndex === 0 ? `$survey_response` : `$survey_response_${originalQuestionIndex}`
+
+ setQuestionsResponses({ ...questionsResponses, [responseKey]: res })
greptile
style: potential race condition when updating questionsResponses state. Consider using functional update pattern
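A minimal sketch of the functional update pattern the comment refers to, with illustrative hook and key names rather than the ones in the pull request:

import { useState } from 'react';

function useResponses() {
  const [responses, setResponses] = useState<Record<string, unknown>>({});

  const recordResponse = (key: string, value: unknown): void => {
    // Derive the next state from the previous state so responses recorded in
    // quick succession are not lost to a stale closure over the old object.
    setResponses((prev) => ({ ...prev, [key]: value }));
  };

  return { responses, recordResponse };
}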
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple instances run simultaneously. The changedFiles null check isn't thread-safe.
diff block
public readonly composer;
+ private _getEditComposerRuntimeCore = () => {
+ return this._threadBinding
+ .getState()
+ .getEditComposer(this._core.getState().id);
+ };
+
public getState() {
return this._core.getState();
}
- public reload({ runConfig = {} }: ReloadConfig = {}) {
+ public reload(reloadConfig: ReloadConfig = {}) {
+ const editComposerRuntimeCore = this._getEditComposerRuntimeCore();
+ const composerRuntimeCore =
+ editComposerRuntimeCore ?? this._threadBinding.getState().composer;
+ const composer = editComposerRuntimeCore ?? composerRuntimeCore;
+
+ const { runConfig = composer.runConfig } = reloadConfig;
greptile
logic: potential race condition if composer state changes between getting editComposerRuntimeCore and composerRuntimeCore
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
greptile
logic: Race condition possible if multiple calls occur before changedFiles is populated
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
greptile
logic: Cache initialization happens after awaiting listChangedFiles(), which could lead to race conditions if multiple runs occur simultaneously
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
greptile
logic: Race condition possible here - changedFiles could be modified between null check and assignment
diff block
.extract_is_cookieless_mode()
.ok_or(CaptureError::InvalidCookielessMode)?,
};
+
+ // if this event was historical but not assigned to the right topic
+ // by the submitting user (i.e. no historical prop flag in event)
+ // we should route it there using event#now if older than 1 day
+ let should_reroute_event = if raw_event_timestamp.is_some() {
+ let days_stale = Duration::days(historical_cfg.historical_rerouting_threshold_days);
+ let threshold = Utc::now() - days_stale;
+ let decision = raw_event_timestamp.unwrap().to_utc() <= threshold;
+ if decision {
greptile
logic: Potential race condition - using Utc::now() after unwrapping timestamp could lead to incorrect comparisons if execution is delayed. Consider passing current time as parameter.
suggested fix
let days_stale = Duration::days(historical_cfg.historical_rerouting_threshold_days);
+ let threshold = context.now.clone() - days_stale;
let decision = raw_event_timestamp.unwrap().to_utc() <= threshold;
if decision {
diff block
} from './constants';
import { NextApiRequestCollect } from 'pages/api/send';
-let lookup;
+
+// 1. Initialize lookup OUTSIDE any function, but make it a Promise.
+let lookupPromise: any = null;
+
+// 2. Create an initialization function.
+async function initializeMaxmind() {
+ if (!lookupPromise) {
+ // eslint-disable-next-line no-console
+ console.log('debug: loading GeoLite2-City.mmdb');
+ const dir = path.join(process.cwd(), 'geo');
+ const dbPath = path.resolve(dir, 'GeoLite2-City.mmdb');
+
+ // Use try/catch for error handling during DB loading
+ try {
+ lookupPromise = maxmind.open(dbPath);
+ } catch (error) {
+ console.error("Error loading GeoLite2 database:", error);
+ // CRITICAL: You MUST handle the error here. Throwing an error
+ // will cause the server to crash, which is better than running
+ // without the database.
+ throw error; // Re-throw to prevent the app from starting.
+ }
+ }
+ return lookupPromise;
+}
+
+initializeMaxmind()
greptile
logic: This initialization should be awaited and handled properly during application startup to prevent race conditions
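A minimal sketch of awaiting one-time initialization at startup so request handling only begins once the lookup is ready; openGeoDatabase is a hypothetical stand-in, not the maxmind API:

// Hypothetical stand-in for the real database open call.
async function openGeoDatabase(): Promise<{ lookup: (ip: string) => string | null }> {
  return { lookup: () => null };
}

async function main(): Promise<void> {
  // Await the one-time initialization before any request handling begins,
  // so no request can observe a half-initialized lookup.
  const geo = await openGeoDatabase();
  console.log('geo database ready:', typeof geo.lookup);
}

main().catch((err) => {
  console.error('startup failed:', err);
});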
diff block
public String getUserName() {
return currentUser.getUsername();
}
+
+ public WebGoatUser getUser() {
+ return currentUser;
+ }
+
+ public void toggleSecurity() {
+ this.securityEnabled = !this.securityEnabled;
+ }
greptile
style: Consider adding synchronization to prevent race conditions when multiple threads toggle security state
suggested fix
+ public synchronized void toggleSecurity() {
this.securityEnabled = !this.securityEnabled;
}
diff block
const { setExperimentFormValue, selectVariant, selectElementType, inspectForElementWithIndex } =
useActions(experimentsTabLogic)
- const [editSelectorShowing, setEditSelectorShowing] = useState(false)
-
- const selectedContentType = transform.html ? 'html' : 'text'
return (
<>
- <div className="flex-1 mb-2">
+ <div className="inline-flex space-x-2">
<LemonButton
+ icon={<IconCursorClick />}
size="small"
type={inspectingElement === transformIndex && selectedVariant === variant ? 'primary' : 'secondary'}
- sideAction={{
- dropdown: {
- overlay: (
- <>
- {Object.entries(ElementSelectorButtonTypes).map(([key, value]) => {
- return (
- <LemonButton
- key={'element-selector-' + key}
- fullWidth
- type={
- inspectingElement === transformIndex &&
- selectedVariant === variant &&
- selectedElementType === key
- ? 'primary'
- : 'tertiary'
- }
- onClick={(e) => {
- e.stopPropagation()
- selectVariant(variant)
- selectElementType(key as ElementSelectorType)
- inspectForElementWithIndex(
- variant,
- key as ElementSelectorType,
- inspectingElement === transformIndex ? null : transformIndex
- )
- }}
- >
- {value}
- </LemonButton>
- )
- })}
- <LemonDivider className="my-1" />
- <LemonButton
- fullWidth
- type="tertiary"
- icon={<IconPencil />}
- onClick={() => {
- setEditSelectorShowing(true)
- }}
- >
- Edit selector
- </LemonButton>
- </>
- ),
- placement: 'bottom',
- matchWidth: true,
- },
+ onClick={() => {
+ selectVariant(variant)
+ inspectForElementWithIndex(
+ variant,
+ selectedElementType as ElementSelectorType,
+ inspectingElement === transformIndex ? null : transformIndex
+ )
}}
>
- {transform.selector ? 'Change element' : 'Select element'}
+ {inspectingElement === transformIndex && selectedVariant === variant
+ ? 'Selecting ...'
+ : 'Select element'}
</LemonButton>
+ <LemonSelect
+ placeholder="Select element type"
+ value={selectedElementType}
+ options={Object.entries(ElementSelectorButtonTypes).map(([key, value]) => ({
+ label: value,
+ value: key,
+ }))}
+ onChange={(value) => {
+ selectElementType(value as ElementSelectorType)
+ }}
+ />
</div>
- {editSelectorShowing && (
- <div className="mb-2">
- <LemonInput
- value={transform.selector}
- onChange={(value) => {
- if (experimentForm.variants) {
- const variants = { ...experimentForm.variants }
- variants[variant].transforms[transformIndex].selector = value
- setExperimentFormValue('variants', variants)
- }
- }}
- placeholder="HTML element selector"
- />
- </div>
- )}
{transform.selector && (
<div>
<div className="mt-4">
- <LemonLabel>Content</LemonLabel>
- <LemonSegmentedButton
- className="mb-1"
- fullWidth
- options={[
- {
- value: 'text',
- label: 'Text',
- icon: <IconMessage />,
- },
- {
- value: 'html',
- label: 'HTML',
- icon: <IconCode />,
- },
- ]}
- onChange={(newSelectedContentType) => {
- const variantConfig = experimentForm.variants[variant]
- if (variantConfig && transform.selector) {
- // Before changing the content type, restore the original html state for this selector
- const originalHtmlState = experimentForm.original_html_state?.[transform.selector]
- if (originalHtmlState) {
- const element = document.querySelector(transform.selector) as HTMLElement
- if (element) {
- element.innerHTML = originalHtmlState.innerHTML
- element.textContent = originalHtmlState.textContent
- }
- }
-
- // Copy the original html state to the new transform, and delete the previously selected content type
- const element = document.querySelector(transform.selector) as HTMLElement
- if (element) {
- const newTransform = { ...transform }
-
- if (newSelectedContentType === 'html') {
- newTransform.html =
- experimentForm.original_html_state?.[transform.selector]?.innerHTML
- delete newTransform.text
- }
- if (newSelectedContentType === 'text' && element.textContent) {
- newTransform.text =
- experimentForm.original_html_state?.[transform.selector]?.textContent
- delete newTransform.html
- }
+ <LemonLabel>Inner HTML</LemonLabel>
+ <LemonTextArea
+ onChange={(value) => {
+ // Update state
+ const updatedVariants = {
+ ...experimentForm.variants,
+ [variant]: {
+ ...experimentForm.variants[variant],
+ transforms: experimentForm.variants[variant].transforms.map((t, i) =>
+ i === transformIndex ? { ...t, html: value } : t
+ ),
+ },
+ }
+ setExperimentFormValue('variants', updatedVariants)
- const updatedVariants = {
- ...experimentForm.variants,
- [variant]: {
- ...variantConfig,
- transforms: variantConfig.transforms.map((t, i) =>
- i === transformIndex ? newTransform : t
- ),
- },
- }
- setExperimentFormValue('variants', updatedVariants)
- }
+ // Update DOM
+ const element = transform.selector
+ ? (document.querySelector(transform.selector) as HTMLElement)
+ : null
+ if (element) {
+ element.innerHTML = value
}
greptile
logic: Direct manipulation of DOM here could cause race conditions or inconsistencies if multiple experiments target the same element. Consider using a more controlled approach or adding checks for conflicting modifications.
diff block
+import os
+import uuid
+import asyncio
+import threading
+from typing import List, Dict, Optional
+
+from dotenv import load_dotenv
+from langchain.text_splitter import RecursiveCharacterTextSplitter
+from langchain_openai import OpenAIEmbeddings
+from qdrant_client.http.models import PointStruct
+from more_itertools import chunked
+from tenacity import retry, stop_after_attempt, wait_exponential, retry_if_exception_type
+from qdrant_client.http.exceptions import ResponseHandlingException
+
+from qdrant_wrapper.qdrant_base import QdrantBase
+from qdrant_client.http.models import VectorParams, Distance
+
+load_dotenv()
+# Constants
+CHUNK_SIZE = 8000
+CHUNK_OVERLAP = 500
+SEPARATOR = "块"
+BATCH_SIZE = 50
+EMBEDDING_BATCH_SIZE = 32
+MAX_RETRY_ATTEMPTS = 5
+RETRY_MULTIPLIER = 1
+MIN_RETRY_WAIT = 1
+MAX_RETRY_WAIT = 10
+VECTOR_SIZE = 1536
+VECTOR_DISTANCE = Distance.COSINE
+
+class QdrantIngestor(QdrantBase):
+ """Class for ingesting documents into Qdrant."""
+
+ def __init__(self, document_id: str, document_content: str):
+ super().__init__()
+ self.text_splitter = RecursiveCharacterTextSplitter(
+ chunk_size=CHUNK_SIZE,
+ chunk_overlap=CHUNK_OVERLAP,
+ separators=[SEPARATOR],
+ keep_separator=False
+ )
+ self.document_id = document_id
+ self.document_content = document_content
+
+ @retry(
+ stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
+ wait=wait_exponential(multiplier=RETRY_MULTIPLIER, min=MIN_RETRY_WAIT, max=MAX_RETRY_WAIT),
+ retry=retry_if_exception_type(ResponseHandlingException)
+ )
+ async def _upsert_batch(self, batch: List[PointStruct]):
+ """Upsert a batch of points with retry logic."""
+ await self.async_client.upsert(
+ collection_name=self.document_id,
+ points=batch
+ )
+
+ async def upsert_in_batches(self, points: List[PointStruct], batch_size: int = BATCH_SIZE):
+ """Upsert points in batches to avoid overloading the service."""
+ for batch in chunked(points, batch_size):
+ await self._upsert_batch(list(batch))
+
+ async def process_section(self, section_info: Dict, extract_section_content) -> int:
+ """Process a single section from the document."""
+ if not self.document_content:
+ raise ValueError("HTML content must be loaded before processing sections")
+
+ section_name = section_info['sectionName']
+ section_number = section_info['sectionNumber']
+ chapter_number = section_info['chapterNumber']
+ full_number = f"{chapter_number}.{section_number}"
+
+ content_dict = extract_section_content(self.document_content, full_number)
+ if "error" in content_dict:
+ print(f"Error extracting content: {content_dict['error']}")
+ return 0
+
+ content_text = content_dict["content"]
+ chunks = self.text_splitter.split_text(content_text)
+ if not chunks:
+ return 0
+
+ thread_name = threading.current_thread().name
+ print(f"[{thread_name}] Starting section {chapter_number}.{section_number}")
greptile
logic: mixing threading and asyncio can lead to race conditions - remove threading.current_thread() usage
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple calls happen simultaneously due to shared changedFiles state. Consider making this thread-safe.
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible here - multiple simultaneous calls could try to initialize changedFiles at the same time
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
greptile
logic: Cache initialization happens after await - potential race condition if multiple calls occur simultaneously
diff block
}
async sendProgressReport(payload: TestingModuleProgressReportPayload) {
- this.channel.emit(TESTING_MODULE_PROGRESS_REPORT, payload);
+ this.channel.emit(TESTING_MODULE_PROGRESS_REPORT, {
+ ...payload,
+ details: { ...payload.details, selectedStoryCount: this.selectedStoryCountForLastestRun },
+ });
+
+ const status = 'status' in payload ? payload.status : undefined;
+ const progress = 'progress' in payload ? payload.progress : undefined;
+ if (
+ ((status === 'success' || status === 'cancelled') && progress?.finishedAt) ||
+ status === 'failed'
+ ) {
greptile
logic: Race condition possible here - if a new test run starts before the previous one finishes, the count could be reset prematurely
diff block
// This is the input stream from elsewhere so we want to do some proper validation
const event = CdpInternalEventSchema.parse(kafkaEvent)
- const [teamHogFunctions, team] = await Promise.all([
- this.hogFunctionManager.getHogFunctionsForTeam(event.team_id, ['destination']),
- this.hub.teamManager.fetchTeam(event.team_id),
- ])
-
- if (!teamHogFunctions || !team) {
+ if (!this.hogFunctionManager.teamHasHogDestinations(event.team_id)) {
+ // No need to continue if the team doesn't have any functions
return
}
greptile
logic: This early return relies on teamHasHogDestinations being synchronous, but the method name suggests it could be async. Verify that this method is indeed synchronous to avoid potential race conditions.
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs execute simultaneously - changedFiles could be modified between null check and assignment
diff block
);
console.log(`adding back cached emails`, newCache.length);
- await env.UTILITY_KV.put("loop_user_emails_v10", JSON.stringify(newCache), {
- expirationTtl: 60 * 60 * 24, // 1 day
+ await safePut({
+ key: env.UTILITY_KV,
+ keyName: "loop_user_emails_v10",
+ value: JSON.stringify(newCache),
+ options: { expirationTtl: 60 * 60 * 24 }, // 1 day
});
greptile
style: KV store update inside user loop could cause race conditions if multiple users are processed simultaneously
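A minimal sketch of accumulating results in memory and writing to the store once after the loop, so overlapping runs never interleave partial writes; the KeyValueStore interface and key name are assumptions, not the real KV API:

interface KeyValueStore {
  put(key: string, value: string): Promise<void>;
}

async function cacheEmails(kv: KeyValueStore, users: { email: string }[]): Promise<void> {
  const emails: string[] = [];
  for (const user of users) {
    emails.push(user.email); // accumulate in memory inside the loop
  }
  // One write after the loop: the stored value is always a complete snapshot,
  // so overlapping runs cannot interleave partial updates.
  await kv.put('user_emails', JSON.stringify(emails));
}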
diff block
+diff a/macos/Onit/UI/Prompt/TextInputView.swift b/macos/Onit/UI/Prompt/TextInputView.swift (rejected hunks)
+@@ -20,6 +20,7 @@ struct TextInputView: View {
+ @Default(.mode) var mode
+
+ @State private var textHeight: CGFloat = 20
++ @State private var isProcessingURL: Bool = false
+ private let maxHeightLimit: CGFloat = 100
+
+ var body: some View {
+@@ -52,9 +53,25 @@ struct TextInputView: View {
+ .frame(height: min(textHeight, maxHeightLimit))
+ .onAppear { focused = true }
+ .onChange(of: model.textFocusTrigger) { focused = true }
++ .onChange(of: model.pendingInstruction) { oldValue, newValue in
++ // Check for URLs when text changes
++ if !isProcessingURL && !newValue.isEmpty {
++ Task {
++ await checkForURLs()
++ }
++ }
++ }
greptile
logic: No task cancellation when pendingInstruction changes rapidly. Could lead to race conditions and unnecessary processing. Consider using a task cancellation pattern.
diff block
+import {
+ Action,
+ ActionPanel,
+ Clipboard,
+ Form,
+ getPreferenceValues,
+ Icon,
+ popToRoot,
+ showToast,
+ Toast,
+} from "@raycast/api";
+import { FormValidation, useForm } from "@raycast/utils";
+import { createPaste } from "./lib/privatebin";
+import { generatePassword, isMarkdown } from "./lib/tools";
+import * as fs from "node:fs";
+import { useEffect } from "react";
+
+interface PasteDataForm {
+ pasteData: string;
+ password: string;
+ expire: string;
+ burnAfterRead: boolean;
+ format: string;
+ attachment: string[];
+}
+
+export default function Command() {
+ const expirations = {
+ "5min": "5 minutes",
+ "10min": "10 minutes",
+ "1hour": "1 hour",
+ "1day": "1 day",
+ "1week": "1 week",
+ "1month": "1 month",
+ "1year": "1 year",
+ never: "Never",
+ };
+ const formats = { plaintext: "Plain Text", syntaxhighlighting: "Source Code", markdown: "Markdown" };
+
+ const { url, includePassword } = getPreferenceValues();
+
+ const { handleSubmit, itemProps, setValue, values } = useForm<PasteDataForm>({
+ onSubmit: async (values) => {
+ await showToast({
+ style: Toast.Style.Animated,
+ title: "Encrypting data...",
+ });
+
+ let filePath: string | null = values.attachment[0] ?? null;
+ if (filePath && (!fs.existsSync(filePath) || !fs.lstatSync(filePath).isFile())) {
+ filePath = null;
+ }
+
+ const { id, pasteKey } = await createPaste(
+ values.pasteData,
+ values.expire,
+ values.password,
+ values.burnAfterRead,
+ filePath,
+ );
+
+ let copyText = `${url.replace(/\/+$/, "")}/?${id}#${pasteKey}`;
+ if (includePassword && values.password) {
+ copyText = `${copyText}\nPassword: ${values.password}`;
+ }
+
+ await Clipboard.copy(copyText);
+
+ await showToast({
+ style: Toast.Style.Success,
+ title: "Share URL copied to clipboard",
+ });
+
+ await popToRoot();
+ },
+ validation: {
+ pasteData: FormValidation.Required,
+ expire: FormValidation.Required,
+ format: FormValidation.Required,
+ attachment: (value) => (value && value?.length > 1 ? "Maximum 1 attachment can be selected" : undefined),
+ },
+ initialValues: {
+ expire: "1day",
+ },
+ });
+
+ const createAndCopyPassword = async () => {
+ const password = generatePassword(8);
+ setValue("password", password);
+ showToast({
+ style: Toast.Style.Success,
+ title: "Password created" + (includePassword ? "" : " and copied to clipboard"),
+ });
+ if (!includePassword) {
+ await Clipboard.copy(password);
+ }
+ };
greptile
style: The `showToast` call is not awaited here, which could lead to race conditions if the toast needs to complete before the clipboard operation.
suggested fix
const createAndCopyPassword = async () => {
const password = generatePassword(8);
setValue("password", password);
await showToast({
style: Toast.Style.Success,
title: "Password created" + (includePassword ? "" : " and copied to clipboard"),
});
if (!includePassword) {
await Clipboard.copy(password);
}
};
diff block
+import { List, showToast, Toast, Icon, getPreferenceValues, useNavigation, Color } from "@raycast/api";
+import { usePromise } from "@raycast/utils";
+import { useState, useEffect, useCallback } from "react";
+import fs from "fs";
+import { getMarkdownFiles } from "./utils/fileOperations";
+import { getAllUniqueTags, isSystemTag, getSystemTag } from "./utils/tagOperations";
+import { groupFilesByFolder } from "./utils/groupOperations";
+import { CreateFileForm } from "./components/CreateFileForm";
+import { FileListItem } from "./components/FileListItem";
+import { PaginationSection } from "./components/PaginationSection";
+import { CommonActions, LoadMoreAction } from "./components/ActionComponents";
+import { MarkdownEmptyView } from "./components/MarkdownEmptyView";
+import { TagSearchList } from "./components/TagSearchList";
+import path from "path";
+
+export const markdownDir = getPreferenceValues<{ markdownDir: string }>().markdownDir;
+
+const ITEMS_PER_PAGE = 20;
+const INITIAL_LOAD_LIMIT = 50;
+const LOAD_INCREMENT = 50;
+
+export default function Command() {
+ const { push } = useNavigation();
+ const [searchText, setSearchText] = useState("");
+ const [currentPage, setCurrentPage] = useState(0);
+ const [selectedTag, setSelectedTag] = useState<string | null>(null);
+ const [showColorTags, setShowColorTags] = useState(true);
+ const [selectedFolder, setSelectedFolder] = useState<string>("");
+ const [loadLimit, setLoadLimit] = useState<number>(INITIAL_LOAD_LIMIT);
+ const [totalFiles, setTotalFiles] = useState<number>(0);
+ const [rootDirectory, setRootDirectory] = useState<string>(markdownDir);
+
+ // Validate markdownDir
+ useEffect(() => {
+ if (!markdownDir || !fs.existsSync(markdownDir)) {
+ showToast({
+ style: Toast.Style.Failure,
+ title: "Invalid Markdown Directory",
+ message: "Please set a valid directory in preferences.",
+ });
+ } else {
+ setRootDirectory(markdownDir);
+ }
+ }, [markdownDir]);
+
+ // Initialize total files count
+ useEffect(() => {
+ const getTotalFiles = async () => {
+ try {
+ const allFiles = await getMarkdownFiles();
+ setTotalFiles(allFiles.length);
+ console.log(`Total files: ${allFiles.length}`);
+ } catch (error) {
+ console.error("Error getting total files:", error);
+ }
+ };
+
+ getTotalFiles();
+ }, []);
+
+ // Define the fetch function
+ const fetchMarkdownFiles = useCallback(async () => {
+ console.log(`Fetching files with limit: ${loadLimit}`);
+ const files = await getMarkdownFiles(loadLimit);
+ console.log(`Loaded ${files.length} files, limit: ${loadLimit}, total: ${totalFiles}`);
+ return files;
+ }, [loadLimit, totalFiles]);
+
+ // Get the Markdown files
+ const { data, isLoading, error, revalidate } = usePromise(fetchMarkdownFiles, [], {
+ execute: true,
+ });
+
+ // Handle errors
+ useEffect(() => {
+ if (error) {
+ showToast({
+ style: Toast.Style.Failure,
+ title: "Loading Markdown files failed",
+ message: error instanceof Error ? error.message : String(error),
+ });
+ }
+ }, [error]);
+
+ // Debug log for key variables
+ useEffect(() => {
+ console.log(`loadLimit: ${loadLimit}, totalFiles: ${totalFiles}, selectedTag: ${selectedTag}`);
+ }, [loadLimit, totalFiles, selectedTag]);
+
+ // Reload files when loadLimit changes
+ useEffect(() => {
+ revalidate();
+ }, [loadLimit, revalidate]);
+
+ // Filtering and paging data
+ const filteredData = data
+ ? data.filter(
+ (file) =>
+ (file.name.toLowerCase().includes(searchText.toLowerCase()) ||
+ file.folder.toLowerCase().includes(searchText.toLowerCase())) &&
+ (!selectedTag || file.tags.includes(selectedTag)),
+ )
+ : [];
+ console.log("Filtered data count:", filteredData.length);
+
+ const totalPages = Math.ceil(filteredData.length / ITEMS_PER_PAGE);
+ const paginatedData = filteredData.slice(currentPage * ITEMS_PER_PAGE, (currentPage + 1) * ITEMS_PER_PAGE);
+ console.log("Paginated data count:", paginatedData.length);
+
+ // Calculate the current page display range
+ const startItem = currentPage * ITEMS_PER_PAGE + 1;
+ const endItem = Math.min((currentPage + 1) * ITEMS_PER_PAGE, filteredData.length);
+ const pageInfoText =
+ filteredData.length > 0
+ ? `Showing ${startItem}-${endItem} of ${filteredData.length} (Total ${totalFiles} files)`
+ : "File not found";
+
+ // Navigate to the Create File form
+ const showCreateFileForm = () => {
+ push(<CreateFileForm rootDirectory={rootDirectory} currentFolder={selectedFolder} onFileCreated={revalidate} />);
+ };
+
+ // Get all tags
+ const allTags = data ? getAllUniqueTags(data, showColorTags) : [];
+
+ // Update rootDirectory if data is available
+ useEffect(() => {
+ if (data && data.length > 0 && !rootDirectory) {
+ const firstFilePath = data[0].path;
+ const folderPath = path.dirname(firstFilePath);
+ setRootDirectory(folderPath === markdownDir ? markdownDir : folderPath);
+ console.log("Set root directory:", rootDirectory);
+ }
+ }, [data, rootDirectory]);
greptile
logic: The rootDirectory update effect has a potential race condition: it calls setRootDirectory and then logs rootDirectory, which still holds the stale value at that point
suggested fix
useEffect(() => {
if (data && data.length > 0 && !rootDirectory) {
const firstFilePath = data[0].path;
const folderPath = path.dirname(firstFilePath);
+ const newRootDirectory = folderPath === markdownDir ? markdownDir : folderPath;
+ setRootDirectory(newRootDirectory);
+ console.log("Set root directory:", newRootDirectory);
}
}, [data, rootDirectory]);
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple concurrent runs modify changedFiles - consider using a lock or making changedFiles function-scoped
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: listChangedFiles() is now awaited but the function might be called multiple times concurrently, potentially causing race conditions
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
greptile
logic: listChangedFiles() is now awaited but the function might be called multiple times concurrently, potentially causing race conditions with the shared changedFiles variable
diff block
+//
+// WebContentContext.swift
+// Onit
+//
+// Created by Loyd Kim on 3/11/2024.
+//
+
+import Foundation
+import SwiftUI
+
+/// A class to handle web content scraping and context creation
+@MainActor
+class WebContentContext {
+
+ /// Maximum length of web content to include in context
+ internal static let maxContentLength = 10000
+
+ /// Processes text input to detect URLs and create context from web content
+ /// - Parameters:
+ /// - text: The text input that may contain URLs
+ /// - model: The Onit model to update with context
+ /// - Returns: A tuple containing the processed text (with URL removed) and a boolean indicating if URLs were found
+ static func processTextForURLs(text: String, model: OnitModel) async -> (processedText: String, foundURLs: Bool) {
+ // Detect URLs in the text
+ let urls = URLDetector.detectURLs(in: text)
+
+ guard !urls.isEmpty else {
+ return (text, false)
+ }
+
+ // Process each URL
+ for (index, url) in urls.enumerated() {
+ let urlHost = url.host ?? "URL"
+
+ // Check for duplicates on the main actor
+ let isDuplicate = await MainActor.run {
+ model.pendingContextList.contains { context in
+ if case .webAuto(let appName, _, _) = context {
+ return appName == "Web: \(urlHost)"
+ }
+ if case .auto(let appName, _) = context {
+ return appName == "Web: \(urlHost)"
+ }
+ return false
+ }
+ }
+
+ if isDuplicate {
+ continue
+ }
+
+ await MainActor.run {
+ model.pendingContextList.append(.loading(urlHost))
+ }
+
+ do {
+ // Show loading indicator or feedback
+ // You could add a loading indicator here if needed
+
+ // Scrape content from the URL
+ let result = try await URLDetector.scrapeContentAndMetadata(from: url)
+ let truncatedContent = result.content.count > maxContentLength
+ ? String(result.content.prefix(maxContentLength)) + "\n[Content truncated due to length...]"
+ : result.content
+
+ // Create a context with the web content
+ await MainActor.run {
+ // Remove loading context
+ model.pendingContextList.removeAll { context in
+ if case .loading(let host) = context {
+ return host == urlHost
+ }
+ return false
+ }
greptile
logic: Potential race condition if multiple URLs from the same host are processed simultaneously. Consider using an identifier more specific than the host alone.
diff block
});
};
+const handleOnlyEntsChanged = async ({
+ req,
+ res,
+ attachParams,
+ curCusProduct,
+}: {
+ req: any;
+ res: any;
+ attachParams: AttachParams;
+ curCusProduct: FullCusProduct;
+}) => {
+ const logger = req.logtail;
+ logger.info("Only entitlements changed, no need to update prices");
+
+ // Remove subscription from previous cus product
+ await CusProductService.update({
+ sb: req.sb,
+ cusProductId: curCusProduct.id,
+ updates: {
+ subscription_ids: [],
+ },
+ });
greptile
logic: Removing subscription IDs before creating the new customer product risks leaving data inconsistent if the creation fails. Consider using a transaction or performing both updates atomically.
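A hedged sketch of the atomicity suggestion: create the replacement product first, or run both writes inside one transaction so they commit or roll back together. The helper names below are hypothetical stand-ins, not the reviewed service API.

```ts
// Hypothetical sketch: both writes succeed or fail together.
// withTransaction, createNewCusProduct, and clearSubscriptionIds are assumed helpers.
async function handleOnlyEntsChangedAtomically(
  deps: {
    withTransaction: <T>(fn: (tx: unknown) => Promise<T>) => Promise<T>;
    createNewCusProduct: (tx: unknown) => Promise<void>;
    clearSubscriptionIds: (tx: unknown, cusProductId: string) => Promise<void>;
  },
  cusProductId: string,
): Promise<void> {
  await deps.withTransaction(async (tx) => {
    // Create the replacement first; if this throws, nothing has been cleared.
    await deps.createNewCusProduct(tx);
    await deps.clearSubscriptionIds(tx, cusProductId);
  });
}
```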
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs occur simultaneously - changedFiles could be modified between null check and assignment
diff block
/* eslint-disable no-console */
+
+import { AsyncLocalStorage } from 'async_hooks';
+
export class ConsoleListener {
- private readonly originalConsole;
+ private static isInitialized = false;
+ private readonly originalConsole = {
+ log: console.log,
+ error: console.error,
+ warn: console.warn,
+ info: console.info,
+ debug: console.debug,
+ };
+ private readonly consoleAsyncLocalStrorage = new AsyncLocalStorage<{
+ callback: (type: string, message: any[]) => void;
+ }>();
- constructor() {
- this.originalConsole = {
- log: console.log,
- error: console.error,
- warn: console.warn,
- info: console.info,
- debug: console.debug,
- };
- }
+ run<T>(
+ callback: () => T,
+ { onConsole }: { onConsole: (type: string, message: any[]) => void },
+ ): T {
+ if (!ConsoleListener.isInitialized) {
+ this.intercept();
- intercept(callback: (type: string, message: any[]) => void) {
- Object.keys(this.originalConsole).forEach((method) => {
- console[method] = (...args: any[]) => {
- callback(method, args);
- };
- });
+ ConsoleListener.isInitialized = true;
+ }
greptile
logic: static initialization flag could lead to race conditions if multiple ConsoleListener instances are created simultaneously
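One hedged way to make the one-time patching safe regardless of how many listeners are constructed is to do it behind a synchronously checked module-level guard (or at module load). A minimal sketch, not the project's actual API:

```ts
// Illustrative sketch: patch the console exactly once, idempotently.
const originalConsole = {
  log: console.log,
  error: console.error,
  warn: console.warn,
  info: console.info,
  debug: console.debug,
};

let intercepted = false;

function interceptOnce(onCall: (method: string, args: unknown[]) => void): void {
  if (intercepted) return; // guard is checked and set in the same synchronous turn
  intercepted = true;
  (Object.keys(originalConsole) as Array<keyof typeof originalConsole>).forEach((method) => {
    console[method] = (...args: unknown[]) => {
      onCall(method, args);
      originalConsole[method](...args); // preserve the original behavior
    };
  });
}
```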
diff block
import fetch from "node-fetch";
import { useEffect, useState } from "react";
+// Import the history functionality
+let commandHistory = [];
greptile
logic: Global mutable state can cause race conditions. Consider moving this into React state
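A minimal sketch of the suggested move into React state, with illustrative names: keep the history in a hook so updates are ordered setState calls rather than mutations of a shared module variable.

```ts
import { useCallback, useState } from "react";

// Sketch: command history as component state instead of a module-level array.
export function useCommandHistory() {
  const [history, setHistory] = useState<string[]>([]);

  const addCommand = useCallback((command: string) => {
    // Functional update avoids reading a stale snapshot of the array.
    setHistory((prev) => [...prev, command]);
  }, []);

  return { history, addCommand };
}
```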
diff block
+/// <reference types="bun" />
+/// <reference path="./svg.d.ts" />
+import type { Element } from "../src/types";
+
+import { rm } from "node:fs/promises";
+
+import { format } from "prettier";
+import { optimize } from "svgo";
+import { yiq } from "yiq";
+
+import prettierConfig from "../.prettierrc.json";
+import elementSvg from "../assets/element.svg";
+
+interface PubChemElements_all {
+ Table: {
+ Columns: {
+ Column: string[];
+ };
+ Row: {
+ Cell: string[];
+ }[];
+ };
+}
+
+const dataFile = Bun.file("assets/PubChemElements_all.json");
+let data: PubChemElements_all;
+try {
+ data = await dataFile.json();
+} catch (err) {
+ if (err.code !== "ENOENT") {
+ throw err;
+ }
+
+ const resp = await fetch("https://pubchem.ncbi.nlm.nih.gov/rest/pug/periodictable/JSON").then((resp) => resp.text());
+
+ await Bun.write("assets/PubChemElements_all.json", resp).then(() => {
+ console.log("Downloaded PubChemElements_all.json");
+ });
+
+ data = JSON.parse(resp);
+}
+
+const optimizedElementSvg = optimize(elementSvg).data;
+
+await rm("assets/elements", { force: true, recursive: true });
+await rm("src/elements.ts", { force: true });
+
+const elements: Element[] = [];
+let elementsTs = `
+import { Element } from "./types";
+
+export const elements: Record<string, Element> = {
+`;
+
+for (const { Cell } of data.Table.Row) {
+ // @ts-expect-error
+ const element: Element = {};
+ for (let i = 0; i < Cell.length; i++) {
+ element[data.Table.Columns.Column[i]] = Cell[i];
+ }
+ elements.push(element);
+ elementsTs += `${element.Symbol}: ${JSON.stringify(element)},`;
+
+ let textColor: string;
+ try {
+ textColor = yiq(`#${element.CPKHexColor}`);
+ } catch (err) {
+ console.warn(err);
+ textColor = "#FFF";
+ element.CPKHexColor = "000";
+ }
+
+ const svg = optimizedElementSvg
+ .replace(/#{TextColor}/g, textColor)
+ .replace(/{{ (\w+) }}/g, (match, key) => element[key]);
+
+ Bun.write(`assets/elements/${element.Symbol}.svg`, svg).then(() => {
+ console.log(`Generated ${element.Symbol}.svg`);
+ });
greptile
logic: File write operations are not awaited, which could lead to race conditions or incomplete writes
```suggestion
+ await Bun.write(`assets/elements/${element.Symbol}.svg`, svg);
console.log(`Generated ${element.Symbol}.svg`);
```
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple processes call listChangedFiles() simultaneously since changedFiles is a global variable
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple calls happen while changedFiles is being populated
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs occur simultaneously - changedFiles could be modified between null check and assignment
diff block
+use base64::{engine::general_purpose, Engine};
+use std::sync::Arc;
+use std::time::{Duration, SystemTime, UNIX_EPOCH};
+
+use crate::constants::{
+ MAX_NEGATIVE_TIMEZONE_HOURS, MAX_POSITIVE_TIMEZONE_HOURS, SALT_TTL_SECONDS,
+};
+use crate::metrics::metrics_consts::{
+ COOKIELESS_CACHE_HIT_COUNTER, COOKIELESS_CACHE_MISS_COUNTER, COOKIELESS_REDIS_ERROR_COUNTER,
+};
+use common_metrics::inc;
+use common_redis::{Client as RedisClient, CustomRedisError};
+use moka::sync::Cache;
+use rand::RngCore;
+use thiserror::Error;
+
+#[derive(Debug, Error, PartialEq)]
+pub enum SaltCacheError {
+ #[error("Date is out of range")]
+ DateOutOfRange,
+ #[error("Redis error: {0}")]
+ RedisError(String),
+ #[error("Failed to get salt from redis")]
+ SaltRetrievalFailed,
+}
+
+impl From<CustomRedisError> for SaltCacheError {
+ fn from(err: CustomRedisError) -> Self {
+ inc(
+ COOKIELESS_REDIS_ERROR_COUNTER,
+ &[("operation".to_string(), "from_redis_error".to_string())],
+ 1,
+ );
+ SaltCacheError::RedisError(err.to_string())
+ }
+}
+
+/// SaltCache manages the local cache of salts used for cookieless hashing
+/// using the Moka synchronous caching library for efficient TTL handling
+pub struct SaltCache {
+ /// Cache of salts, keyed by YYYY-MM-DD
+ cache: Cache<String, Vec<u8>>,
+ /// Redis client for fetching and storing salts
+ redis_client: Arc<dyn RedisClient + Send + Sync>,
+}
+
+impl SaltCache {
+ /// Create a new SaltCache with the given Redis client
+ pub fn new(
+ redis_client: Arc<dyn RedisClient + Send + Sync>,
+ salt_ttl_seconds: Option<u64>,
+ ) -> Self {
+ // Create a cache with a maximum of 1000 entries
+ // This is more than enough for our use case, as we only store one salt per day
+ let cache = Cache::builder()
+ // Set TTL to the salt TTL
+ .time_to_live(Duration::from_secs(
+ salt_ttl_seconds.unwrap_or(SALT_TTL_SECONDS),
+ ))
+ // Build the cache
+ .build();
+
+ SaltCache {
+ cache,
+ redis_client,
+ }
+ }
+
+ /// Get the salt for a specific day (YYYY-MM-DD format)
+ pub async fn get_salt_for_day(&self, yyyymmdd: &str) -> Result<Vec<u8>, SaltCacheError> {
+ // Validate the date format
+ if !is_calendar_date_valid(yyyymmdd) {
+ return Err(SaltCacheError::DateOutOfRange);
+ }
+
+ // Check if we have the salt in the cache
+ if let Some(salt) = self.cache.get(yyyymmdd) {
+ inc(
+ COOKIELESS_CACHE_HIT_COUNTER,
+ &[
+ ("operation".to_string(), "getSaltForDay".to_string()),
+ ("day".to_string(), yyyymmdd.to_string()),
+ ],
+ 1,
+ );
+ return Ok(salt);
+ }
+
+ inc(
+ COOKIELESS_CACHE_MISS_COUNTER,
+ &[
+ ("operation".to_string(), "getSaltForDay".to_string()),
+ ("day".to_string(), yyyymmdd.to_string()),
+ ],
+ 1,
+ );
+
+ // Try to get it from Redis
+ let redis_key = format!("cookieless_salt:{yyyymmdd}");
+ let salt_base64 = match self.redis_client.get(redis_key.clone()).await {
+ Ok(value) => Some(value),
+ Err(CustomRedisError::NotFound) => None,
+ Err(e) => {
+ inc(
+ COOKIELESS_REDIS_ERROR_COUNTER,
+ &[
+ ("operation".to_string(), "get_salt".to_string()),
+ ("day".to_string(), yyyymmdd.to_string()),
+ ],
+ 1,
+ );
+ return Err(SaltCacheError::RedisError(e.to_string()));
+ }
+ };
+
+ if let Some(salt_base64) = salt_base64 {
+ // Decode the base64 salt
+ let salt = match general_purpose::STANDARD.decode(salt_base64) {
+ Ok(s) => s,
+ Err(_) => {
+ inc(
+ COOKIELESS_REDIS_ERROR_COUNTER,
+ &[
+ ("operation".to_string(), "decode_salt".to_string()),
+ ("day".to_string(), yyyymmdd.to_string()),
+ ],
+ 1,
+ );
+ return Err(SaltCacheError::SaltRetrievalFailed);
+ }
+ };
+
+ // Store it in the cache
+ self.cache.insert(yyyymmdd.to_string(), salt.clone());
+
+ return Ok(salt);
+ }
+
+ // Generate a new salt
+ let mut new_salt = vec![0u8; 16];
+ rand::thread_rng().fill_bytes(&mut new_salt);
+ let new_salt_base64 = general_purpose::STANDARD.encode(&new_salt);
+
+ // Try to set it in Redis with NX (only if it doesn't exist)
+ // Note: This is a simplified version as the Redis client doesn't have setnx
+ // In a real implementation, you'd want to use a Redis transaction or Lua script
+ match self
greptile
logic: Race condition handling is incomplete. Using a separate key with ':nx' suffix doesn't guarantee atomicity. Should use Redis SETNX command or a Lua script for proper locking.
diff block
interface AuthedState {
repos: Repo[];
+ currentRepo: Repo | null;
user?: User;
loading: boolean;
setRepos: (r: Repo[]) => void;
+ setCurrentRepo: (r: Repo) => void;
setUser: (u: User) => void;
setLoading: (loading: boolean) => void;
reset: () => void;
}
export const useZustandStore = create<AuthedState>((set, get) => ({
+ currentRepo: null,
repos: [],
loading: true,
+ setCurrentRepo: (r) => {
+ set((state) => ({
+ currentRepo: r,
+ loading: false,
+ }));
+ },
greptile
style: setting loading: false in individual setters could cause race conditions if multiple setters are called in sequence
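One hedged alternative, sketched with a placeholder `Repo` shape: let each setter touch only its own slice and have the caller toggle `loading` explicitly around the fetch, so setters called in sequence cannot fight over it.

```ts
import { create } from "zustand";

type Repo = { id: string; name: string }; // placeholder shape for illustration

interface AuthedState {
  repos: Repo[];
  currentRepo: Repo | null;
  loading: boolean;
  setRepos: (r: Repo[]) => void;
  setCurrentRepo: (r: Repo) => void;
  setLoading: (loading: boolean) => void;
}

export const useZustandStore = create<AuthedState>((set) => ({
  repos: [],
  currentRepo: null,
  loading: true,
  setRepos: (repos) => set({ repos }),
  setCurrentRepo: (currentRepo) => set({ currentRepo }),
  setLoading: (loading) => set({ loading }),
}));
```

The caller then brackets its own fetch: `setLoading(true)` before, `setLoading(false)` after both setters have run.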
diff block
if (msg.type === "ready") {
this.#state.status = { type: "ready" };
- this.#state.rpcs = msg.data.rpcs;
- this.#state.state = {
- ...msg.data.state,
- json: JSON.parse(msg.data.state.native),
- };
- this.#state.connections = msg.data.connections;
- this.#update();
}
greptile
logic: Setting ready status but not updating state data here creates a race condition. The ready handler should be combined with the inspect handler below.
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
greptile
logic: listChangedFiles() is now awaited, but its result is spread synchronously; if it resolves to an async iterator rather than a plain iterable, the spread will fail
diff block
+import { COMMAND_MENU_COMPONENT_INSTANCE_ID } from '@/command-menu/constants/CommandMenuComponentInstanceId';
+import { COMMAND_MENU_CONTEXT_CHIP_GROUPS_DROPDOWN_ID } from '@/command-menu/constants/CommandMenuContextChipGroupsDropdownId';
+import { COMMAND_MENU_PREVIOUS_COMPONENT_INSTANCE_ID } from '@/command-menu/constants/CommandMenuPreviousComponentInstanceId';
+import { useResetContextStoreStates } from '@/command-menu/hooks/useResetContextStoreStates';
+import { commandMenuNavigationMorphItemByPageState } from '@/command-menu/states/commandMenuNavigationMorphItemsState';
+import { commandMenuNavigationRecordsState } from '@/command-menu/states/commandMenuNavigationRecordsState';
+import { commandMenuNavigationStackState } from '@/command-menu/states/commandMenuNavigationStackState';
+import { commandMenuPageInfoState } from '@/command-menu/states/commandMenuPageInfoState';
+import { commandMenuPageState } from '@/command-menu/states/commandMenuPageState';
+import { commandMenuSearchState } from '@/command-menu/states/commandMenuSearchState';
+import { hasUserSelectedCommandState } from '@/command-menu/states/hasUserSelectedCommandState';
+import { isCommandMenuClosingState } from '@/command-menu/states/isCommandMenuClosingState';
+import { isCommandMenuOpenedState } from '@/command-menu/states/isCommandMenuOpenedState';
+import { CommandMenuPages } from '@/command-menu/types/CommandMenuPages';
+import { viewableRecordIdState } from '@/object-record/record-right-drawer/states/viewableRecordIdState';
+import { useDropdownV2 } from '@/ui/layout/dropdown/hooks/useDropdownV2';
+import { emitRightDrawerCloseEvent } from '@/ui/layout/right-drawer/utils/emitRightDrawerCloseEvent';
+import { useSelectableList } from '@/ui/layout/selectable-list/hooks/useSelectableList';
+import { usePreviousHotkeyScope } from '@/ui/utilities/hotkey/hooks/usePreviousHotkeyScope';
+import { useRecoilCallback } from 'recoil';
+
+export const useCommandMenuCloseAnimationCompleteCleanup = () => {
+ const { resetSelectedItem } = useSelectableList('command-menu-list');
+
+ const { goBackToPreviousHotkeyScope } = usePreviousHotkeyScope();
+
+ const { resetContextStoreStates } = useResetContextStoreStates();
+
+ const { closeDropdown } = useDropdownV2();
+
+ const commandMenuCloseAnimationCompleteCleanup = useRecoilCallback(
+ ({ set }) =>
+ () => {
+ closeDropdown(COMMAND_MENU_CONTEXT_CHIP_GROUPS_DROPDOWN_ID);
+
+ resetContextStoreStates(COMMAND_MENU_COMPONENT_INSTANCE_ID);
+ resetContextStoreStates(COMMAND_MENU_PREVIOUS_COMPONENT_INSTANCE_ID);
+
+ set(viewableRecordIdState, null);
+ set(commandMenuPageState, CommandMenuPages.Root);
+ set(commandMenuPageInfoState, {
+ title: undefined,
+ Icon: undefined,
+ instanceId: '',
+ });
+ set(isCommandMenuOpenedState, false);
+ set(commandMenuSearchState, '');
+ set(commandMenuNavigationMorphItemByPageState, new Map());
+ set(commandMenuNavigationRecordsState, []);
+ set(commandMenuNavigationStackState, []);
+ resetSelectedItem();
+ set(hasUserSelectedCommandState, false);
+ goBackToPreviousHotkeyScope();
+
+ emitRightDrawerCloseEvent();
+ set(isCommandMenuClosingState, false);
greptile
logic: isCommandMenuClosingState should be set to false before emitting the close event to prevent race conditions
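Sketch of the reordering, with stand-in names for the Recoil setter and the event emitter from the reviewed hook:

```ts
// Illustrative ordering helper; setClosing stands in for set(isCommandMenuClosingState, ...)
// and emitClose for emitRightDrawerCloseEvent.
function finishCloseAnimation(setClosing: (value: boolean) => void, emitClose: () => void): void {
  setClosing(false); // clear the closing flag first
  emitClose();       // listeners reacting to the event now observe a consistent state
}
```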
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs occur simultaneously due to shared changedFiles state
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
greptile
style: Cache initialization happens after await, which could lead to race conditions if multiple runs occur simultaneously
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
greptile
logic: Race condition possible here - changedFiles could be modified between null check and assignment
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
greptile
logic: Race condition possible if multiple runs occur simultaneously due to shared changedFiles state
diff block
personalAccessToken: notion_token,
extraParameters: { owner: "user" },
onAuthorize({ token }) {
- notion = new Client({ auth: token });
+ notion = new Client({
+ auth: token,
+ timeoutMs: 10000 // 10 second timeout
+ });
},
});
export function getNotionClient() {
if (!notion) {
- throw new Error("No Notion client initialized");
+ if (notion_token) {
+ notion = new Client({
+ auth: notion_token,
+ timeoutMs: 10000 // 10 second timeout
+ });
+ } else {
+ throw new Error("No Notion client initialized and no token available");
+ }
}
-
return notion;
}
+
+// Export a function to get the token for AI tools
+export function getNotionToken() {
+ return notion_token;
+}
+
+// Wrapper function to handle token and client initialization for all tools
+export async function withNotionClient<T>(fn: (client: Client) => Promise<T>): Promise<T> {
+ const token = getNotionToken();
+ if (!token) {
+ throw new Error("No Notion token available");
+ }
+
+ const client = new Client({ auth: token });
+ return fn(client);
+}
greptile
style: Consider reusing the existing notion client instance instead of creating a new one to maintain consistency and avoid potential race conditions
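A hedged sketch of the reuse suggestion, assuming the standard `@notionhq/client` package: route every helper through one lazily created client so `withNotionClient` does not construct a new instance per call. The timeout mirrors the diff; the rest is illustrative.

```ts
import { Client } from "@notionhq/client";

// Sketch: a single lazily-initialized client shared by all helpers.
let notion: Client | null = null;

function getOrCreateClient(token: string): Client {
  if (!notion) {
    notion = new Client({ auth: token, timeoutMs: 10000 });
  }
  return notion;
}

export async function withNotionClient<T>(
  token: string,
  fn: (client: Client) => Promise<T>,
): Promise<T> {
  return fn(getOrCreateClient(token)); // no per-call client construction
}
```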
diff block
+import { useEffect, useState } from "react";
+import { Alert, AlertType, ContentType, ContentTypeNames, ContentTypeURLMap } from "./types/alert";
+import { AlertsService } from "./services/alertsService";
+import { showFailureToast, useCachedPromise } from "@raycast/utils";
+import { UserUtils } from "./utils/userUtils";
+import { Clipboard, Color, Icon, MenuBarExtra, open, showToast, Toast } from "@raycast/api";
+import { formatRelativeDate } from "./utils/dateUtils";
+
+export default function Command() {
+ const [enrichedAlerts, setEnrichedAlerts] = useState<Alert[]>([]);
+
+ const {
+ data: alertResponse,
+ isLoading,
+ revalidate,
+ } = useCachedPromise(() => AlertsService.fetchAlerts(), [], {
+ keepPreviousData: false,
+ initialData: [],
+ });
+
+ useEffect(() => {
+ async function enrichAlerts() {
+ const alerts = Array.isArray(alertResponse) ? alertResponse : [];
+ if (alerts.length === 0) return;
+
+ // Filter out read alerts first
+ const unreadAlerts = alerts.filter((alert) => !alert.read);
+ const enriched = [...unreadAlerts];
+
+ // Process alerts in batches
+ const batchSize = 5;
+ for (let i = 0; i < enriched.length; i += batchSize) {
+ const batch = enriched.slice(i, i + batchSize);
+
+ await Promise.all(
+ batch.map(async (alert, index) => {
+ try {
+ if (alert.caused_member_id !== 0) {
+ const username = await UserUtils.idToUsername(alert.caused_member_id);
+ enriched[i + index] = { ...alert, username };
+ } else {
+ enriched[i + index] = { ...alert, username: "Guest User" };
+ }
+ } catch (error) {
+ console.error(`Error fetching username for ID ${alert.caused_member_id}:`, error);
+ }
+ }),
+ );
+ }
+ setEnrichedAlerts(enriched);
+ }
+ enrichAlerts();
+ }, [alertResponse]);
+
+ const handleMarkAllAsRead = async () => {
+ try {
+ const success = await AlertsService.markAllAsRead();
+ if (success) {
+ await showToast(Toast.Style.Success, "Marked all notifications as read");
+ await UserUtils.clearCache();
+ revalidate();
+ } else {
+ await showFailureToast("Failed to mark notifications as read", {
+ title: "Failed to mark notifications as read",
+ });
+ }
+ } catch (error) {
+ await showFailureToast(error, { title: "Error marking notifications as read", message: String(error) });
+ }
+ };
+
+ const getAlertMessage = (alert: Alert) => {
+ const username = alert.username || `User #${alert.caused_member_id}`;
+ const contentTypeName = ContentTypeNames[alert.content_type as ContentType] || "content";
+
+ switch (alert.alert_type) {
+ case AlertType.REACTION:
+ return `${username} reacted to your ${contentTypeName}`;
+ case AlertType.REPLY:
+ return `${username} replied to your ${contentTypeName}`;
+ case AlertType.TICKET_MOVED:
+ return `Your ticket has been moved`;
+ case AlertType.MENTION:
+ return `${username} mentioned you in a ${contentTypeName}`;
+ default:
+ return `New notification from ${username}`;
+ }
+ };
+
+ const handleDebug = async () => {
+ console.log("Debug", alertResponse);
+ await Clipboard.copy(JSON.stringify(alertResponse, null, 2));
+ await showToast(Toast.Style.Success, "Debug information copied to clipboard. Please send this to Geek");
+ };
+
+ const getContentUrl = (alert: Alert) => {
+ const baseUrl = ContentTypeURLMap[alert.content_type as ContentType];
+ if (alert.content_type === ContentType.WIKI) {
+ return `${baseUrl}`;
+ }
+ return `${baseUrl}/${alert.content_id}`;
+ };
+
+ const handleRefresh = async () => {
+ await UserUtils.clearCache();
+ revalidate();
+ };
+
+ const unreadCount = enrichedAlerts.filter((a) => !a.read).length;
+
+ return (
+ <MenuBarExtra
+ icon={
+ unreadCount > 0
+ ? { source: "../assets/bbb-icon.png" }
+ : { source: "../assets/bbb-icon.png", tintColor: Color.SecondaryText }
+ }
+ title={unreadCount > 0 ? String(unreadCount) : "0"}
+ isLoading={isLoading}
+ >
+ <MenuBarExtra.Section title={unreadCount > 0 ? "Notifications" : "No Unread Notifications"}>
+ {enrichedAlerts.map((alert, index) => (
+ <MenuBarExtra.Item
+ key={index}
+ title={getAlertMessage(alert)}
+ onAction={() => {
+ open(getContentUrl(alert));
+ alert.read = true;
+ revalidate();
greptile
logic: mutating alert.read directly and then calling revalidate could lead to race conditions - consider using proper state updates
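A minimal sketch of the state-update approach the comment points at, with a placeholder `Alert` shape (the real type lives in ./types/alert and is assumed to carry some unique identifier):

```ts
import type { Dispatch, SetStateAction } from "react";

type Alert = { content_id: number; read: boolean }; // placeholder for illustration

// Sketch: mark one alert as read via setState instead of mutating the object in place.
function markAlertRead(
  setEnrichedAlerts: Dispatch<SetStateAction<Alert[]>>,
  contentId: number,
): void {
  setEnrichedAlerts((prev) =>
    prev.map((a) => (a.content_id === contentId ? { ...a, read: true } : a)),
  );
}
```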
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple instances run simultaneously due to shared changedFiles variable. Consider passing state through parameters instead.
diff block
+import fs from "fs";
+import path from "path";
+import { environment } from "@raycast/api";
+
+/**
+ * Safely clean up temporary files
+ */
+export function cleanupTempFile(filePath: string | null): void {
+ if (filePath && fs.existsSync(filePath)) {
greptile
style: existsSync followed by unlinkSync creates a race condition. Consider using try-catch without the check.
suggested fix
+ if (filePath) {
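A fuller sketch of the check-free pattern, assuming the helper's only job is to delete the file: attempt the unlink and treat a missing file as success, since an existence check can be invalidated before the unlink runs.

```ts
import fs from "fs";

export function cleanupTempFile(filePath: string | null): void {
  if (!filePath) return;
  try {
    fs.unlinkSync(filePath);
  } catch (error) {
    // A file that is already gone is fine; rethrow anything else.
    if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
      throw error;
    }
  }
}
```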
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
greptile
logic: Cache initialization happens after awaiting listChangedFiles, creating a race condition if multiple calls happen simultaneously
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple calls happen while changedFiles is being populated
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs occur simultaneously due to shared changedFiles state
diff block
try:
yield conn
except Exception:
- self.remove(conn)
+ self._remove_tasks.add(weakref.ref(asyncio.create_task(self.remove(conn))))
raise
greptile
logic: Potential race condition: the task reference could be garbage collected before the task completes, since only a weak reference is stored
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
greptile
logic: Race condition possible here - changedFiles could be modified between null check and assignment
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs occur simultaneously due to shared changedFiles state. Consider passing state as parameters.
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: listChangedFiles() is now awaited but changedFiles is still mutated globally. This could cause race conditions in concurrent executions.
diff block
+// Polyfill for fetch API in Node.js environment
+import nodeFetch from "node-fetch";
+// @ts-expect-error - Polyfill for fetch in Node.js
+global.fetch = nodeFetch;
+
+import fs from "fs/promises";
+import path from "path";
+import { getPreferenceValues, showToast, Toast } from "@raycast/api";
+import { GoogleGenerativeAI } from "@google/generative-ai";
+
+interface Preferences {
+ geminiApiKey: string;
+ batchSize: string;
+}
+
+// Get user preferences
+const preferences = getPreferenceValues<Preferences>();
+
+// Initialize Gemini client
+const geminiAI = preferences.geminiApiKey ? new GoogleGenerativeAI(preferences.geminiApiKey) : null;
+
+interface RenameResult {
+ originalPath: string;
+ newPath: string;
+ success: boolean;
+ error?: string;
+}
+
+export async function renameScreenshots(filePaths: string[]): Promise<RenameResult[]> {
+ // Process files in parallel with a concurrency limit based on user preferences
+ const concurrencyLimit = parseInt(preferences.batchSize || "3"); // Default to 3 if not set
+ const batchResults = [];
+
+ // Process files in batches
+ for (let i = 0; i < filePaths.length; i += concurrencyLimit) {
+ const batch = filePaths.slice(i, i + concurrencyLimit);
+ const batchPromises = batch.map((filePath) => renameScreenshot(filePath));
+ batchResults.push(...(await Promise.all(batchPromises)));
+ }
+
+ return batchResults;
+}
+
+async function renameScreenshot(filePath: string): Promise<RenameResult> {
+ try {
+ // Generate a descriptive name for the screenshot
+ const newFileName = await generateScreenshotName(filePath);
+
+ // Create the new file path with the generated name
+ const directory = path.dirname(filePath);
+ const extension = path.extname(filePath);
+ const newFilePath = path.join(directory, `${newFileName}${extension}`);
+
+ // Check if the new filename already exists
+ try {
+ await fs.access(newFilePath);
+ // If we get here, file exists, so add a timestamp to make it unique
+ const timestamp = Date.now();
+ const newUniqueFilePath = path.join(directory, `${newFileName}_${timestamp}${extension}`);
+ await fs.rename(filePath, newUniqueFilePath);
+
+ return {
+ originalPath: filePath,
+ newPath: newUniqueFilePath,
+ success: true,
+ };
+ } catch (error) {
greptile
logic: race condition possible between fs.access check and fs.rename - should use try/catch around rename instead
```suggestion
try {
+ await fs.rename(filePath, newFilePath);
return {
originalPath: filePath,
+ newPath: newFilePath,
success: true,
};
} catch (error) {
+ // If file exists, add timestamp and try again
+ if ((error as NodeJS.ErrnoException).code === 'EEXIST') {
const timestamp = Date.now();
const newUniqueFilePath = path.join(directory, `${newFileName}_${timestamp}${extension}`);
await fs.rename(filePath, newUniqueFilePath);
return {
originalPath: filePath,
newPath: newUniqueFilePath,
success: true,
};
}
+ throw error;
}
```
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple concurrent runs modify changedFiles. Consider using a lock or making changedFiles function-scoped.
diff block
self._playing_speech.cancel(cancel_nested=True)
# Stop current LLM stream
- logger.info(f"cancelling agent reply task: {self._agent_reply_task}")
- if self._agent_reply_task is not None:
- self._agent_reply_task.cancel()
- if self._pending_agent_reply is not None:
- self._pending_agent_reply.cancel()
+ # logger.info(f"cancelling agent reply task: {self._agent_reply_task}")
+ # if self._agent_reply_task is not None:
+ # self._agent_reply_task.cancel()
+ # if self._pending_agent_reply is not None:
+ # self._pending_agent_reply.cancel()
greptile
logic: Removing cancellation of agent reply tasks could cause memory leaks and race conditions if multiple interruptions occur
diff block
+import React, { useState, useEffect } from "react";
+import {
+ ActionPanel,
+ List,
+ Action,
+ showToast,
+ Toast,
+ Icon,
+ Detail,
+ useNavigation,
+ confirmAlert,
+ Form,
+} from "@raycast/api";
+import { SalesforceService, MemoFileService, SalesforceRecord } from "./utils/salesforce";
+import path from "path";
+import fs from "fs";
+
+interface MemoItem {
+ title: string;
+ path: string;
+ metadata: {
+ sfId?: string;
+ sfName?: string;
+ sfType?: string;
+ createdAt?: string;
+ };
+}
+
+export default function ViewMemos() {
+ const [isLoading, setIsLoading] = useState(true);
+ const [memos, setMemos] = useState<MemoItem[]>([]);
+ const { push } = useNavigation();
+ const memoFileService = new MemoFileService();
+
+ useEffect(() => {
+ loadMemos();
+ }, []);
+
+ const loadMemos = async () => {
+ setIsLoading(true);
+ try {
+ const memoPaths = memoFileService.listMemos();
+ const memoItems: MemoItem[] = [];
+
+ for (const memoPath of memoPaths) {
+ try {
+ const { content, metadata } = memoFileService.readMemo(memoPath);
+ const filename = path.basename(memoPath);
+
+ // マークダウンからタイトルを抽出 (最初の# で始まる行)
+ // タイトル抽出を改善:複数行にまたがる場合も考慮
+ let title = filename;
+ const titleMatch = content.match(/^#\s+(.+)$/m);
+ if (titleMatch && titleMatch[1]) {
+ title = titleMatch[1].trim();
+ // タイトルに不正な文字がないか確認
+ if (title.length === 0) {
+ title = filename;
+ }
+ // コンソールにタイトルのバイト表現も出力(デバッグ用)
+ console.log(`タイトル「${title}」のバイト長: ${Buffer.from(title).length}`);
+ }
+
+ memoItems.push({
+ title,
+ path: memoPath,
+ metadata: metadata as any,
+ });
+ } catch (error) {
+ console.error(`Failed to read memo ${memoPath}:`, error);
+ }
+ }
+
+ // 作成日時の新しい順に並べ替え
+ memoItems.sort((a, b) => {
+ const dateA = a.metadata.createdAt ? new Date(a.metadata.createdAt).getTime() : 0;
+ const dateB = b.metadata.createdAt ? new Date(b.metadata.createdAt).getTime() : 0;
+ return dateB - dateA;
+ });
+
+ setMemos(memoItems);
+ } catch (error) {
+ console.error("メモ読み込みエラー:", error);
+ await showToast({
+ style: Toast.Style.Failure,
+ title: "エラー",
+ message: "メモの読み込み中にエラーが発生しました",
+ });
+ } finally {
+ setIsLoading(false);
+ }
+ };
+
+ const deleteMemo = async (memo: MemoItem) => {
+ const confirmed = await confirmAlert({
+ title: "メモを削除",
+ message: `"${memo.title}" を削除しますか?`,
+ primaryAction: {
+ title: "削除",
+ },
+ });
+
+ if (confirmed) {
+ try {
+ fs.unlinkSync(memo.path);
+ await showToast({
+ style: Toast.Style.Success,
+ title: "メモを削除しました",
+ });
+ loadMemos();
+ } catch (error) {
+ console.error("メモ削除エラー:", error);
+ await showToast({
+ style: Toast.Style.Failure,
+ title: "エラー",
+ message: "メモの削除中にエラーが発生しました",
+ });
+ }
+ }
+ };
+
+ return (
+ <List isLoading={isLoading} searchBarPlaceholder="メモを検索">
+ {memos.map((memo) => (
+ <List.Item
+ key={memo.path}
+ title={memo.title}
+ subtitle={memo.metadata.sfName ? `関連レコード: ${memo.metadata.sfType} - ${memo.metadata.sfName}` : ""}
+ accessoryTitle={memo.metadata.createdAt ? new Date(memo.metadata.createdAt).toLocaleString() : ""}
+ actions={
+ <ActionPanel>
+ <Action title="メモを表示" icon={Icon.Eye} onAction={() => push(<MemoDetail memo={memo} />)} />
+ <Action title="メモを編集" icon={Icon.Pencil} onAction={() => push(<EditMemo memo={memo} />)} />
+ <Action title="メモを削除" icon={Icon.Trash} onAction={() => deleteMemo(memo)} />
+ </ActionPanel>
+ }
+ />
+ ))}
+ </List>
+ );
+}
+
+function MemoDetail({ memo }: { memo: MemoItem }) {
+ const [isUploading, setIsUploading] = useState(false);
+ const salesforceService = new SalesforceService();
+ const memoFileService = new MemoFileService();
+ const { push } = useNavigation();
+
+ // メモの内容を読み込む(JSON形式)
+ const { originalData } = memoFileService.readMemo(memo.path);
+
+ // JSONデータを整形して表示用コンテンツを作成
+ const createMarkdownContent = () => {
+ if (!originalData) {
+ return "メモデータを読み込めませんでした";
+ }
+
+ const title = originalData.title || "タイトルなし";
+ const content = originalData.content || "";
+ const metadata = originalData.metadata || {};
+ const syncStatus = originalData.syncStatus || { lastSyncedAt: null, sfNoteId: null };
+
+ // メタデータセクション
+ let metadataSection = "";
+ if (metadata.sfId) {
+ metadataSection += `\n\n## 関連レコード情報\n`;
+ metadataSection += `- **タイプ**: ${metadata.sfType || "不明"}\n`;
+ metadataSection += `- **名前**: ${metadata.sfName || "不明"}\n`;
+ metadataSection += `- **ID**: ${metadata.sfId}\n`;
+ }
+
+ // 作成・更新日時
+ let dateSection = "\n\n## 日時情報\n";
+ if (metadata.createdAt) {
+ dateSection += `- **作成日時**: ${new Date(metadata.createdAt).toLocaleString()}\n`;
+ }
+ if (metadata.updatedAt) {
+ dateSection += `- **更新日時**: ${new Date(metadata.updatedAt).toLocaleString()}\n`;
+ }
+
+ // 同期情報
+ let syncSection = "";
+ if (syncStatus.lastSyncedAt) {
+ syncSection += `\n\n## Salesforce同期情報\n`;
+ syncSection += `- **最終同期**: ${new Date(syncStatus.lastSyncedAt).toLocaleString()}\n`;
+ syncSection += `- **Salesforce ID**: ${syncStatus.sfNoteId || "不明"}\n`;
+ }
+
+ return `# ${title}\n\n${content}${metadataSection}${dateSection}${syncSection}`;
+ };
+
+ const markdownContent = createMarkdownContent();
+
+ const uploadToSalesforce = async () => {
+ setIsUploading(true);
+ try {
+ if (!originalData) {
+ throw new Error("メモデータが読み込めません");
+ }
+
+ console.log("Salesforce送信前のメモデータ:", originalData);
+
+ // 送信するタイトルと本文を取得
+ const title = originalData.title || memo.title;
+ const content = originalData.content || "";
+ const metadata = originalData.metadata || {};
+
+ // Salesforceにメモを作成
+ const memoId = await salesforceService.createMemoRecord(title, content, metadata.sfId);
+
+ // 送信成功後、同期ステータスを更新
+ const updated = memoFileService.updateSyncStatus(memo.path, memoId);
+ if (updated) {
+ console.log("同期ステータスを更新しました:", memoId);
+ }
+
+ await showToast({
+ style: Toast.Style.Success,
+ title: "メモをSalesforceに送信しました",
+ message: `メモID: ${memoId}`,
+ });
+ } catch (error) {
+ console.error("Salesforce送信エラー:", error);
+ await showToast({
+ style: Toast.Style.Failure,
+ title: "送信エラー",
+ message: "Salesforceへのメモ送信中にエラーが発生しました",
+ });
+ } finally {
+ setIsUploading(false);
+ }
+ };
+
+ return (
+ <Detail
+ markdown={markdownContent}
+ isLoading={isUploading}
+ actions={
+ <ActionPanel>
+ <Action title="Salesforceに送信" icon={Icon.Upload} onAction={uploadToSalesforce} />
+ <Action title="メモを編集" icon={Icon.Pencil} onAction={() => push(<EditMemo memo={memo} />)} />
+ </ActionPanel>
+ }
+ />
+ );
+}
+
+// メモ編集コンポーネント
+function EditMemo({ memo }: { memo: MemoItem }) {
+ const memoFileService = new MemoFileService();
+ const salesforceService = new SalesforceService();
+ const { pop, push } = useNavigation();
+ const [isLoading, setIsLoading] = useState(false);
+ const [relatedRecord, setRelatedRecord] = useState<SalesforceRecord | undefined>(undefined);
+
+ // メモの内容を読み込む
+ const { content, metadata, originalData } = memoFileService.readMemo(memo.path);
+
+ // 元のJSONデータがある場合はそれを使用
+ const jsonData = originalData || {
+ title: memo.title,
+ content: "",
+ metadata: metadata,
+ };
+
+ // 初期値設定
+ const [title, setTitle] = useState(jsonData.title || memo.title);
+ const [memoContent, setMemoContent] = useState(jsonData.content || "");
+
+ // 関連レコードの初期設定
+ useEffect(() => {
+ // メタデータから関連レコード情報を取得
+ if (metadata.sfId && metadata.sfName && metadata.sfType) {
+ setRelatedRecord({
+ Id: metadata.sfId,
+ Name: metadata.sfName,
+ Type: metadata.sfType,
+ });
+ }
+ }, [metadata]);
+
+ // レコード検索画面を表示
+ const handleRecordSelect = () => {
+ push(<RecordSearch onRecordSelect={selectRecord} />);
+ };
+
+ // レコード選択時の処理
+ const selectRecord = (record: SalesforceRecord) => {
+ console.log("メモ編集画面でレコード設定:", record);
+ setRelatedRecord(record);
+ };
+
+ // 関連レコードをクリア
+ const clearRelatedRecord = () => {
+ setRelatedRecord(undefined);
+ };
+
+ const handleSubmit = async (values: { title: string; content: string }) => {
+ if (!values.title || !values.content) {
+ await showToast({
+ style: Toast.Style.Failure,
+ title: "入力エラー",
+ message: "タイトルと内容を入力してください",
+ });
+ return;
+ }
+
+ setIsLoading(true);
+ try {
+ // 元のJSONデータを更新
+ const updatedData = {
+ ...jsonData,
+ title: values.title,
+ content: values.content,
+ metadata: {
+ ...(jsonData.metadata || {}),
+ updatedAt: new Date().toISOString(),
+ },
+ };
+
+ // 関連レコード情報の更新
+ if (relatedRecord) {
+ updatedData.metadata = {
+ ...updatedData.metadata,
+ sfId: relatedRecord.Id,
+ sfName: relatedRecord.Name,
+ sfType: relatedRecord.Type,
+ };
+ } else {
+ // 関連レコードがクリアされた場合、関連情報も削除
+ if (updatedData.metadata.sfId) {
+ delete updatedData.metadata.sfId;
+ delete updatedData.metadata.sfName;
+ delete updatedData.metadata.sfType;
+ }
+ }
+
+ // JSON文字列に変換
+ const fileContent = JSON.stringify(updatedData, null, 2);
+
+ // 同じファイルパスに上書き保存
+ fs.writeFileSync(memo.path, fileContent, { encoding: "utf8" });
+
+ // ファイル書き込み後に確認
+ console.log(`メモ更新完了: ${memo.path} (JSON形式)`);
+
+ await showToast({
+ style: Toast.Style.Success,
+ title: "メモを更新しました",
+ message: "変更を保存しました",
+ });
+
+ // 前の画面に戻る
+ pop();
+ } catch (error) {
+ console.error("メモ更新エラー:", error);
+ await showToast({
+ style: Toast.Style.Failure,
+ title: "エラー",
+ message: "メモの更新中にエラーが発生しました",
+ });
+ } finally {
+ setIsLoading(false);
+ }
+ };
+
+ // 現在の関連レコード表示用テキスト
+ const relatedRecordText = relatedRecord ? `${relatedRecord.Type}: ${relatedRecord.Name}` : "なし";
+
+ return (
+ <Form
+ isLoading={isLoading}
+ actions={
+ <ActionPanel>
+ <Action.SubmitForm title="メモを保存" onSubmit={handleSubmit} icon={Icon.Document} />
+ <Action title="関連レコードを選択" onAction={handleRecordSelect} icon={Icon.Link} />
+ {relatedRecord && <Action title="関連レコードをクリア" onAction={clearRelatedRecord} icon={Icon.Trash} />}
+ </ActionPanel>
+ }
+ >
+ <Form.TextField
+ id="title"
+ title="タイトル"
+ placeholder="メモのタイトルを入力"
+ value={title}
+ onChange={setTitle}
+ autoFocus
+ />
+ <Form.TextArea
+ id="content"
+ title="内容"
+ placeholder="メモの内容を入力"
+ value={memoContent}
+ onChange={setMemoContent}
+ />
+ <Form.Description title="関連レコード" text={relatedRecordText} />
+ </Form>
+ );
+}
+
+// レコード検索コンポーネント (view-memosにも追加)
+function RecordSearch({ onRecordSelect }: { onRecordSelect: (record: SalesforceRecord) => void }) {
+ const [searchText, setSearchText] = useState("");
+ const [isLoading, setIsLoading] = useState(false);
+ const [records, setRecords] = useState<SalesforceRecord[]>([]);
+ const salesforceService = new SalesforceService();
+ const { pop } = useNavigation();
+
+ const searchRecords = async () => {
+ if (!searchText || searchText.length < 2) return;
+
+ setIsLoading(true);
+ try {
+ const credentials = await salesforceService.getCredentials();
+ if (!credentials) {
+ await showToast({
+ style: Toast.Style.Failure,
+ title: "認証エラー",
+ message: "Salesforceの認証情報が設定されていません",
+ });
+ return;
+ }
+
+ const searchResults = await salesforceService.searchRecords(searchText);
+ setRecords(searchResults);
+ } catch (error) {
+ console.error("レコード検索エラー:", error);
+ await showToast({
+ style: Toast.Style.Failure,
+ title: "検索エラー",
+ message: "レコードの検索中にエラーが発生しました",
+ });
+ } finally {
+ setIsLoading(false);
+ }
+ };
+
+ React.useEffect(() => {
+ const delaySearch = setTimeout(() => {
+ if (searchText.length >= 2) {
+ searchRecords();
+ }
+ }, 500);
+
+ return () => clearTimeout(delaySearch);
+ }, [searchText]);
+
+ const handleRecordSelection = async (record: SalesforceRecord) => {
+ console.log("レコード選択処理開始:", record);
+ try {
+      // Perform the record selection
+ onRecordSelect(record);
+ console.log("レコード選択完了");
+
+      // Show a success message for the selection
+ await showToast({
+ style: Toast.Style.Success,
+ title: "レコードを選択しました",
+ message: `[${record.Type}] ${record.Name}`,
+ });
+
+      // Wait briefly to make sure the record information has been set
+ setTimeout(() => {
+        // Go back to the previous screen (the memo edit screen)
+ console.log("前の画面に戻ります");
+ pop();
+ }, 300);
+ } catch (error) {
greptile
logic: Using setTimeout for state synchronization can cause race conditions. Consider using state management or callbacks
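One way to remove the timing dependency, as a minimal sketch: let the parent own the state update and pop immediately, since a state update queued through onRecordSelect is not tied to this search view staying mounted. This assumes selectRecord in the parent does nothing beyond calling setRelatedRecord.

const handleRecordSelection = async (record: SalesforceRecord) => {
  // The parent queues setRelatedRecord(record); there is nothing to wait for here.
  onRecordSelect(record);
  await showToast({
    style: Toast.Style.Success,
    title: "Record selected",
    message: `[${record.Type}] ${record.Name}`,
  });
  // Safe to pop right away: the queued parent update survives unmounting this view.
  pop();
};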
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Potential race condition if multiple calls happen simultaneously while changedFiles is null
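A minimal sketch of one way to close that window, reusing the listChangedFiles import from the diff and assuming it resolves to an iterable of paths: cache the in-flight promise rather than the resolved array, so concurrent callers share a single scan.

let changedFilesPromise: Promise<string[]> | null = null;

async function getChangedFilesOnce(): Promise<string[]> {
  if (changedFilesPromise === null) {
    // The assignment happens synchronously, so a second caller arriving before the
    // await settles reuses the same promise instead of starting another scan.
    changedFilesPromise = Promise.resolve(listChangedFiles()).then((files) => [...files]);
  }
  return changedFilesPromise;
}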
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple calls happen while changedFiles is being populated
diff block
selectedRole: IRole | null;
setSelectedRole: Dispatch<SetStateAction<IRole | null>>;
}) => {
- const { getRoles, roles, createRole, createRoleLoading, deleteRole, updateRole } = useRoles();
+ const { roles, createRole, createRoleLoading, deleteRole, updateRole, setRoles } = useRoles();
const [filterValue, setFilterValue] = useState<string>('');
const [editRole, setEditRole] = useState<IRole | null>(null);
const handleEdit = (role: IRole) => {
setEditRole(role);
};
- useEffect(() => {
- getRoles();
- }, [getRoles]);
-
// CREATE
const handleCreateRole = useCallback(async () => {
if (filterValue.length) {
- await createRole({
+ const res = await createRole({
name: filterValue
});
+
+ if (res) {
+ // Update roles state
+ setRoles([...roles, res.data]);
+ }
greptile
style: Spreading the roles array with new data could cause race conditions if multiple roles are created quickly
suggested fix
if (res) {
// Update roles state
+ setRoles(prev => [...prev, res.data]);
}
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs occur simultaneously due to shared changedFiles state. Consider passing state through parameters instead.
diff block
import axios from "axios";
-import Bonjour, { RemoteService } from "bonjour";
+import BonjourService from "bonjour-service";
import { waitUntil } from "./utils";
-import { getPreferenceValues } from "@raycast/api";
+import fs from "fs";
+import os from "os";
+import path from "path";
+import { environment } from "@raycast/api";
+
+interface ElgatoService {
+ name: string;
+ type: string;
+ protocol: string;
+ addresses: string[];
+ referer: {
+ address: string;
+ family: string;
+ port: number;
+ size: number;
+ };
+ port: number;
+ host: string;
+ fqdn: string;
+}
const WARM_TEMPERATURE = 344; // 2900k
const COLD_TEMPERATURE = 143; // 7000k
const TEMPERATURE_STEP = (WARM_TEMPERATURE - COLD_TEMPERATURE) / 20; // 5%
-interface Preferences {
- keyLights_count: string;
+interface CacheData {
+ lights: Array<{ service: ElgatoService }>;
+ lastDiscoveryTime: number;
}
export class KeyLight {
+ private static CACHE_FILE = path.join(os.tmpdir(), "raycast-elgato-keylights.json");
static keyLights: Array<KeyLight>;
+ public readonly service: ElgatoService;
+
+ private static loadCache(): CacheData | null {
+ try {
+ if (fs.existsSync(this.CACHE_FILE)) {
+ const data = JSON.parse(fs.readFileSync(this.CACHE_FILE, "utf8"));
+ return data;
+ }
+ } catch (e) {
+ if (environment.isDevelopment) {
+ console.error("Failed to load cache:", e);
+ }
+ }
+ return null;
+ }
+
+ private static saveCache(lights: Array<KeyLight>) {
+ try {
+ const data: CacheData = {
+ lights: lights.map((light) => ({ service: light.service })),
+ lastDiscoveryTime: Date.now(),
+ };
+ fs.writeFileSync(this.CACHE_FILE, JSON.stringify(data));
+ } catch (e) {
+ if (environment.isDevelopment) {
+ console.error("Failed to save cache:", e);
+ }
+ }
+ }
+
+ private static clearCache() {
+ try {
+ if (fs.existsSync(this.CACHE_FILE)) {
+ fs.unlinkSync(this.CACHE_FILE);
+ }
+ } catch (e) {
+ if (environment.isDevelopment) {
+ console.error("Failed to clear cache:", e);
+ }
+ }
+ this.keyLights = [];
+ }
+
+ private static async validateCachedLights(lights: Array<KeyLight>): Promise<Array<KeyLight>> {
+ const validLights: Array<KeyLight> = [];
+
+ for (const light of lights) {
+ try {
+ // Try to fetch the light's state to verify it's still reachable
+ await light.getKeyLight(light.service);
+ validLights.push(light);
+ } catch (e) {
+ if (environment.isDevelopment) {
+ console.error(`Cached light ${light.service.name} is no longer reachable:`, e);
+ }
+ }
+ }
+
+ return validLights;
+ }
+
+ static async discover(forceRefresh = false) {
+ // Try to load from cache first
+ if (!forceRefresh) {
+ const cache = this.loadCache();
+ if (cache) {
+ if (environment.isDevelopment) {
+ console.log("Found cached lights, validating...");
+ }
+ this.keyLights = cache.lights.map((light) => new KeyLight(light.service));
+
+ // Validate cached lights are still reachable
+ this.keyLights = await this.validateCachedLights(this.keyLights);
+
+ if (this.keyLights.length > 0) {
+ if (environment.isDevelopment) {
+ console.log(
+ "Using validated cached Key Lights:",
+ this.keyLights.map((light) => `${light.service.name} at ${light.service.referer.address}`).join(", ")
+ );
+ }
+ return this.keyLights[0];
+ } else {
+ if (environment.isDevelopment) {
+ console.log("No cached lights are reachable, performing fresh discovery");
+ }
+ }
+ }
+ }
- static async discover() {
- const bonjour = Bonjour();
+ const bonjour = new BonjourService();
this.keyLights = [];
- const preferences = getPreferenceValues<Preferences>();
- const count: number = parseInt(preferences.keyLights_count, 10);
+ if (environment.isDevelopment) {
+ console.log("Starting Bonjour discovery for Key Lights...");
+ }
+
+ let discoveryComplete = false;
+
+ const find = new Promise<KeyLight>((resolve, reject) => {
+ const browser = bonjour.find({ type: "elg" }, (service: ElgatoService) => {
+ // Log complete service object for debugging
+ if (environment.isDevelopment) {
+ console.log(
+ "Bonjour service details:",
+ JSON.stringify(
+ {
+ name: service.name,
+ type: service.type,
+ protocol: service.protocol,
+ addresses: service.addresses,
+ referer: service.referer,
+ port: service.port,
+ host: service.host,
+ fqdn: service.fqdn,
+ },
+ null,
+ 2
+ )
+ );
+ }
+
+ // Get all possible addresses
+ const addresses = service.addresses || [];
+ const refererAddress = service.referer?.address;
+ if (refererAddress && !addresses.includes(refererAddress)) {
+ addresses.push(refererAddress);
+ }
+
+ if (environment.isDevelopment) {
+ console.log("Available addresses:", addresses);
+ }
+
+ // Filter out invalid addresses
+ const validAddresses = addresses.filter(
+ (addr: string) => addr && addr !== "0.0.0.0" && addr !== "127.0.0.1" && !addr.startsWith("fe80:") // Filter out link-local IPv6
+ );
+
+ if (environment.isDevelopment) {
+ console.log("Valid addresses:", validAddresses);
+ }
+
+ if (validAddresses.length === 0) {
+ if (environment.isDevelopment) {
+ console.error("No valid IP addresses found for service:", service.name);
+ }
+ return;
+ }
+
+ // Use the first valid address
+ const address = validAddresses[0];
+ if (environment.isDevelopment) {
+ console.log(`Using address ${address} for Key Light ${service.name}`);
+ }
+
+ // Check if we already have this light
+ const isDuplicate = this.keyLights.some(
+ (light) => light.service.name === service.name && light.service.referer.address === address
+ );
- const find = new Promise<KeyLight>((resolve) => {
- bonjour.find({ type: "elg" }, (service) => {
- const keyLight = new KeyLight(service);
+ if (isDuplicate) {
+ if (environment.isDevelopment) {
+ console.log(`Skipping duplicate Key Light: ${service.name}`);
+ }
+ return;
+ }
+
+ // Create a modified service object with the correct address
+ const serviceWithAddress = {
+ ...service,
+ referer: {
+ ...service.referer,
+ address: address,
+ },
+ };
+
+ const keyLight = new KeyLight(serviceWithAddress);
this.keyLights.push(keyLight);
+ if (environment.isDevelopment) {
+ console.log(`Added Key Light to list. Total lights found: ${this.keyLights.length}`);
+ }
- if (this.keyLights.length == count) {
+ // Save to cache as soon as we find lights
+ this.saveCache(this.keyLights);
+
+ // If discovery timeout has elapsed and we found at least one light, resolve
+ if (discoveryComplete && this.keyLights.length > 0) {
+ if (environment.isDevelopment) {
+ console.log(`Discovery complete. Found ${this.keyLights.length} Key Light(s)`);
+ }
resolve(keyLight);
+ browser.stop();
bonjour.destroy();
}
});
+
+ browser.on("up", (service: ElgatoService) => {
+ if (environment.isDevelopment) {
+ console.log("Service came up:", service.name);
+ }
+ });
+
+ browser.on("down", (service: ElgatoService) => {
+ if (environment.isDevelopment) {
+ console.log("Service went down:", service.name);
+ }
+ });
+
+ browser.on("error", (error: Error) => {
+ if (environment.isDevelopment) {
+ console.error("Bonjour browser error:", error);
+ }
+ reject(new Error(`Bonjour discovery error: ${error.toString()}`));
+ });
+
+ setTimeout(() => {
+ if (environment.isDevelopment) {
+ console.log(`Discovery timeout reached. Found ${this.keyLights.length} light(s)`);
+ }
+ discoveryComplete = true;
+
+ if (this.keyLights.length > 0) {
+ if (environment.isDevelopment) {
+ console.log(
+ "Successfully discovered Key Lights:",
+ this.keyLights.map((light) => `${light.service.name} at ${light.service.referer.address}`).join(", ")
+ );
+ }
+ resolve(this.keyLights[0]);
+ browser.stop();
+ bonjour.destroy();
+ } else {
+ reject(new Error("Cannot discover any Key Lights in the network"));
+ }
+ }, 5000);
greptile
logic: Discovery timeout (5000ms) is less than waitUntil timeout (6000ms) on line 275, which could lead to race conditions. Consider using the same value or making the discovery timeout longer
suggested fix
+ }, 6000);
diff block
+import { useEffect, useRef, useState } from "react";
+import "./tooltip.scss";
+
+export interface TooltipProps {
+ children: React.ReactNode;
+ content: string;
+ position?: "top" | "bottom" | "left" | "right";
+ showArrow?: boolean;
+ offset?: number;
+}
+
+export function Tooltip({
+ children,
+ content,
+ position = "top",
+ offset = 8,
+ showArrow = true,
+}: Readonly<TooltipProps>) {
+ const [isHovering, setIsHovering] = useState(false);
+ const [isVisible, setIsVisible] = useState(false);
+ const tooltipRef = useRef<HTMLDivElement>(null);
+
+ useEffect(() => {
+ if (isHovering) {
+ setIsVisible(true);
+ return;
+ }
+
+ const element = tooltipRef.current;
+ if (!element) return;
+
+ const hideTooltip = () => !isHovering && setIsVisible(false);
+ element.addEventListener("transitionend", hideTooltip);
greptile
style: Potential race condition if isHovering changes between closure creation and transitionend event firing
suggested fix
+ const hideTooltip = () => setIsVisible(false);
element.addEventListener("transitionend", hideTooltip);
diff block
+/* eslint-disable @typescript-eslint/no-unused-vars */
+/* eslint-disable unused-imports/no-unused-vars */
+/* eslint-disable @typescript-eslint/no-non-null-assertion */
+/* eslint-disable @nx/workspace-explicit-boolean-predicates-in-if */
+/* eslint-disable project-structure/folder-structure */
+import {
+ CurrentWorkspaceMember,
+ currentWorkspaceMemberState,
+} from '@/auth/states/currentWorkspaceMemberState';
+import { currentWorkspaceState } from '@/auth/states/currentWorkspaceState';
+import { useFirestoreDb } from '@/chat/call-center/hooks/useFirestoreDb';
+import { useUploadFileToBucket } from '@/chat/hooks/useUploadFileToBucket';
+import {
+ ChatContextType,
+ IChat,
+ IChatUser,
+ ISearchResult,
+ Message,
+ TDateFirestore,
+} from '@/chat/internal/types/chat';
+import { MessageType } from '@/chat/types/MessageType';
+import { CoreObjectNameSingular } from '@/object-metadata/types/CoreObjectNameSingular';
+import { useFindManyRecords } from '@/object-record/hooks/useFindManyRecords';
+import { SnackBarVariant } from '@/ui/feedback/snack-bar-manager/components/SnackBar';
+import { useSnackBar } from '@/ui/feedback/snack-bar-manager/hooks/useSnackBar';
+import { WorkspaceMember } from '@/workspace-member/types/WorkspaceMember';
+import {
+ and,
+ collection,
+ doc,
+ documentId,
+ getDoc,
+ getDocs,
+ onSnapshot,
+ query,
+ setDoc,
+ where,
+} from 'firebase/firestore';
+import { createContext, useEffect, useState } from 'react';
+import { useRecoilValue } from 'recoil';
+import { v4 } from 'uuid';
+
+export const ChatContext = createContext<ChatContextType | null>(null);
+
+const ChatProvider = ({ children }: { children: React.ReactNode }) => {
+ const [queryResult, setQueryResult] = useState<ISearchResult | null>(null);
+ const [workspaceUsers, setWorkspaceUsers] = useState<IChatUser[]>([]);
+ const [otherUserStatus, setOtherUserStatus] = useState<string>('');
+ const [imageUpload, setImageUpload] = useState<File | null>(null);
+ const [fileUpload, setFileUpload] = useState<File | null>(null);
+ const [goingToMessageIndex, setGoingToMessageIndex] = useState<
+ number | undefined
+ >(undefined);
+ const [searchQuery, setSearchQuery] = useState<string>('');
+ const [isNewChatOpen, setIsNewChatOpen] = useState(false);
+ const [newMessage, setNewMessage] = useState<string>('');
+ const [isStatusOpen, setIsStatusOpen] = useState(false);
+ const [isServiceStatusOpen, setIsServiceStatusOpen] = useState(false);
+ const [isSearchOpen, setIsSearchOpen] = useState(false);
+ const [userChat, setUserChat] = useState<IChatUser>();
+ const [isSearching, setIsSearching] = useState(false);
+ const [thisUserStatus, setThisUserStatus] = useState<
+ 'Available' | 'Busy' | 'Away'
+ >('Available');
+ const [thisServiceStatus, setThisServiceStatus] = useState<
+ 'Resolved' | 'In progress' | 'Waiting' | 'On hold' | 'Pending'
+ >('Pending');
+ const [isAnexOpen, setIsAnexOpen] = useState(false);
+ const [openChat, setOpenChat] = useState<IChat>();
+ const [chatId, setChatId] = useState<string>('');
+ const [isDetailsOpen, setIsDetailsOpen] = useState(false);
+
+ const {firestoreDb} = useFirestoreDb()
+
+ const usersRef = collection(firestoreDb, 'users');
+ const chatsRef = collection(firestoreDb, 'chats');
+
+ const currentWorkspaceMember = useRecoilValue(currentWorkspaceMemberState);
+ const currentWorkspace = useRecoilValue(currentWorkspaceState);
+
+ const { records: workspaceMembers, refetch: refetchMembers } =
+ useFindManyRecords<WorkspaceMember>({
+ objectNameSingular: CoreObjectNameSingular.WorkspaceMember,
+ });
+
+ const { uploadFileToBucket } = useUploadFileToBucket();
+ const { enqueueSnackBar } = useSnackBar();
+ // const { sendChatNotification } = useSendChatNotification();
+
+ useEffect(() => {}, [refetchMembers]);
+
+ // 1°: ver se ja existe um user com esse workspace, se nao: criar um
+ useEffect(() => {
+ createUser(currentWorkspace?.id, currentWorkspaceMember);
+
+ setTimeout(() => {
+ updateUser();
+ }, 1000); // it has to wait to avoid race conditions with the user creation. updateUser() will update the user's avatar and name, because it can change and the avatar.url gets expired after a while
greptile
logic: Race condition risk: setTimeout is used to handle user creation timing. Consider using async/await or a more robust synchronization mechanism.
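A minimal sketch of sequencing the two calls instead of a fixed delay, assuming createUser returns a promise that resolves once the Firestore user document exists:

useEffect(() => {
  let cancelled = false;
  const syncUser = async () => {
    // Wait for creation to finish before refreshing avatar/name, instead of sleeping 1s.
    await createUser(currentWorkspace?.id, currentWorkspaceMember);
    if (!cancelled) {
      await updateUser();
    }
  };
  void syncUser();
  return () => {
    cancelled = true; // ignore results if the provider unmounts mid-flight
  };
}, [currentWorkspace?.id, currentWorkspaceMember]);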
diff block
"agent interrupted",
extra={"speech_id": self.speech_id},
)
+ logger.info(f"AGENT INTERRUPTED TEXT: {self.tts_forwarder.played_text}")
+ if (
+ self.tts_forwarder.played_text
+ and self.tts_forwarder.played_text.strip() != ""
+ ):
+ AppConfig().call_metadata.update({"agent_has_been_interrupted": True})
+ else:
+ AppConfig().is_human_interrupted = True
greptile
logic: Race condition possible here - tts_forwarder.played_text could change between the check and the update to AppConfig
diff block
+// Copyright 2025 OpenObserve Inc.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+use std::collections::{HashMap, HashSet};
+
+use chrono::{DateTime, Utc};
+use config::RwAHashMap;
+
+use super::{
+ error::*,
+ handler::{ClientId, QuerierName, TraceId},
+};
+
+#[derive(Debug, Default)]
+pub struct SessionManager {
+ sessions: RwAHashMap<ClientId, SessionInfo>,
+ mapped_queriers: RwAHashMap<QuerierName, HashSet<TraceId>>,
+}
+
+#[derive(Debug, Clone)]
+pub struct SessionInfo {
+ pub querier_mappings: HashMap<TraceId, QuerierName>,
+ pub cookie_expiry: Option<DateTime<Utc>>,
+ pub last_active: DateTime<Utc>,
+}
+
+impl SessionManager {
+ pub async fn register_client(
+ &self,
+ client_id: &ClientId,
+ cookie_expiry: Option<DateTime<Utc>>,
+ ) {
+ if self.sessions.read().await.get(client_id).is_some() {
+ return;
+ }
+
+ let session_info = SessionInfo {
+ querier_mappings: HashMap::default(),
+ cookie_expiry,
+ last_active: Utc::now(),
+ };
+
+ let mut write_guard = self.sessions.write().await;
+ if !write_guard.contains_key(client_id) {
+ write_guard.insert(client_id.clone(), session_info.clone());
+ }
greptile
logic: Potential race condition between check and insert. Consider using entry API instead of separate contains_key check
suggested fix
let mut write_guard = self.sessions.write().await;
+ write_guard.entry(client_id.clone())
+ .or_insert_with(|| session_info.clone());
diff block
+import { queryOptions, useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
+import { useTerminal } from "./auth";
+import type { Terminal } from "@terminaldotshop/sdk";
+import { useCart } from "./cart";
+
+const addressesOptions = (terminal: Terminal) => {
+ return queryOptions({
+ queryKey: ["addresses"],
+ queryFn: async () => {
+ return terminal.address.list().then((r) => r.data);
+ },
+ initialData: [],
+ });
+};
+
+export const useAddress = (id?: string) => {
+ const terminal = useTerminal();
+ return useQuery({
+ ...addressesOptions(terminal),
+ select: (addresses) => addresses.find((a) => a.id === id),
+ });
+};
+
+export const useAddresses = () => {
+ const terminal = useTerminal();
+ return useQuery(addressesOptions(terminal));
+};
+
+export const useCreateAddress = () => {
+ const terminal = useTerminal();
+ const { refetch } = useAddresses();
+ return useMutation({
+ mutationFn: async (address: Terminal.AddressCreateParams) => {
+ await terminal.address.create(address);
+ await refetch();
+ },
+ });
+};
+
+export const useSetAddress = () => {
+ const terminal = useTerminal();
+ const qc = useQueryClient();
+ const { data: cart, refetch } = useCart();
+ return useMutation({
+ mutationFn: async (addressID: string) => {
+ await terminal.cart.setAddress({ addressID });
+ qc.setQueryData(["cart"], { ...cart, addressID });
+ await refetch();
greptile
logic: Race condition possible between setAddress API call and optimistic update. Consider using onSuccess callback instead
suggested fix
await terminal.cart.setAddress({ addressID });
},
+ onSuccess: () => {
qc.setQueryData(["cart"], { ...cart, addressID });
+ refetch();
+ }
diff block
+import { queryOptions, useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
+import { useTerminal } from "./auth";
+import type { Terminal } from "@terminaldotshop/sdk";
+import { useCart } from "./cart";
+
+const addressesOptions = (terminal: Terminal) => {
+ return queryOptions({
+ queryKey: ["addresses"],
+ queryFn: async () => {
+ return terminal.address.list().then((r) => r.data);
+ },
+ initialData: [],
+ });
+};
+
+export const useAddress = (id?: string) => {
+ const terminal = useTerminal();
+ return useQuery({
+ ...addressesOptions(terminal),
+ select: (addresses) => addresses.find((a) => a.id === id),
+ });
+};
+
+export const useAddresses = () => {
+ const terminal = useTerminal();
+ return useQuery(addressesOptions(terminal));
+};
+
+export const useCreateAddress = () => {
+ const terminal = useTerminal();
+ const { refetch } = useAddresses();
+ return useMutation({
+ mutationFn: async (address: Terminal.AddressCreateParams) => {
+ await terminal.address.create(address);
+ await refetch();
+ },
greptile
style: Consider using onSuccess callback for refetch to avoid potential race conditions
suggested fix
mutationFn: async (address: Terminal.AddressCreateParams) => {
await terminal.address.create(address);
},
+ onSuccess: () => {
+ refetch();
+ }
diff block
+//
+// WaveformIndicator.swift
+// Onit
+//
+// Created by OpenHands on 3/17/2025.
+//
+
+import SwiftUI
+
+struct RecordingIndicator: View {
+ @ObservedObject var audioRecorder: AudioRecorder
+
+ var body: some View {
+ Group {
+ if audioRecorder.isTranscribing {
+ LoadingIndicator()
+ } else {
+ if audioRecorder.isRecording {
+ WaveformIndicator(audioLevel: audioRecorder.audioLevel)
+ }
+ }
+ }
+ .onChange(of: audioRecorder.isTranscribing) { _ in
+ // Redraw the view when isTranscribing changes
+ }
+ }
+}
+
+struct WaveformIndicator: View {
+ var audioLevel: Float
+
+ @State private var phase = 0.0
+ private let numberOfBars = 4
+ private let animationDuration = 0.6
+ private let minHeight: CGFloat = 2
+ private let maxHeight: CGFloat = 14
+ @State private var barVariations: [Double] = []
+
+ init(audioLevel: Float = 0.0) {
+ self.audioLevel = audioLevel
+ self._barVariations = State(initialValue: Array(repeating: 1.0, count: 4))
+ }
+
+ var body: some View {
+ HStack(spacing: 3) {
+ ForEach(0..<numberOfBars, id: \.self) { index in
+ bar(for: index)
+ }
+ }
+ .frame(height: 22)
+ .padding(.horizontal, 8)
+ .background(
+ RoundedRectangle(cornerRadius: 11)
+ .fill(Color.red.opacity(0.2))
+ )
+ .onAppear {
+ withAnimation(.linear(duration: animationDuration).repeatForever(autoreverses: false)) {
+ phase = 1.0
+ }
+ }
+ }
+ private func bar(for index: Int) -> some View {
+ return RoundedRectangle(cornerRadius: 1)
+ .fill(Color.red)
+ .frame(width: 2, height: barHeight(for: index))
+ .animation(
+ .easeInOut(duration: 0.1), // Quick response to audio changes
+ value: audioLevel
+ )
+ }
+
+ private func barHeight(for index: Int) -> CGFloat {
+ // Base height is determined by the audio level
+ let baseHeight = minHeight + (maxHeight - minHeight) * CGFloat(audioLevel)
+
+
+ // Add some variation between bars to make it look more natural
+ let newVariation = Double.random(in: 0.5...1.2)
+ var newVariations = barVariations
+ newVariations[index] = barVariations[index] * 0.8 + newVariation * 0.2
+ DispatchQueue.main.async {
+ barVariations = newVariations
+ }
greptile
logic: Updating @State directly in DispatchQueue.main.async can cause race conditions. Use SwiftUI's state management instead.
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple processes call this simultaneously - changedFiles is a global variable being modified asynchronously
diff block
sentence_stream = text_data.sentence_stream
forward_start_time = time.time()
+ AppConfig().playout_start_time = forward_start_time
greptile
logic: Writing to global AppConfig() state could cause race conditions if multiple TTSSegmentsForwarder instances run concurrently. Consider passing timing info through events instead.
diff block
statsKey: `cdpConsumer.handleEachBatch.parseKafkaMessages`,
func: async () => {
const events: HogFunctionInvocationGlobals[] = []
+
await Promise.all(
messages.map(async (message) => {
try {
const clickHouseEvent = parseJSON(message.value!.toString()) as RawClickHouseEvent
- if (!this.hogFunctionManager.teamHasHogDestinations(clickHouseEvent.team_id)) {
- // No need to continue if the team doesn't have any functions
- return
- }
+ const [teamHogFunctions, team] = await Promise.all([
+ this.hogFunctionManager.getHogFunctionsForTeam(clickHouseEvent.team_id, [
+ 'destination',
+ ]),
+ this.hub.teamManager.fetchTeam(clickHouseEvent.team_id),
+ ])
greptile
logic: Parallel fetching of team and functions could lead to race conditions if team is deleted between fetches. Consider fetching team first and early-returning if not found.
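A sketch of the sequential variant the comment suggests, reusing the same managers as the diff: resolve the team first and bail out if it is gone, so the hog-function lookup never runs for a deleted team.

const team = await this.hub.teamManager.fetchTeam(clickHouseEvent.team_id)
if (!team) {
    // Team no longer exists; skip this event entirely
    return
}
const teamHogFunctions = await this.hogFunctionManager.getHogFunctionsForTeam(team.id, [
    'destination',
])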
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple calls happen while changedFiles is null. Consider adding a lock mechanism.
diff block
+import { queryOptions, useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
+import { useTerminal } from "./auth";
+import type { Terminal } from "@terminaldotshop/sdk";
+import { useCart } from "./cart";
+
+const addressesOptions = (terminal: Terminal) => {
+ return queryOptions({
+ queryKey: ["addresses"],
+ queryFn: async () => {
+ return terminal.address.list().then((r) => r.data);
+ },
+ initialData: [],
+ });
+};
+
+export const useAddress = (id?: string) => {
+ const terminal = useTerminal();
+ return useQuery({
+ ...addressesOptions(terminal),
+ select: (addresses) => addresses.find((a) => a.id === id),
+ });
+};
+
+export const useAddresses = () => {
+ const terminal = useTerminal();
+ return useQuery(addressesOptions(terminal));
+};
+
+export const useCreateAddress = () => {
+ const terminal = useTerminal();
+ const { refetch } = useAddresses();
+ return useMutation({
+ mutationFn: async (address: Terminal.AddressCreateParams) => {
+ await terminal.address.create(address);
+ await refetch();
+ },
greptile
logic: Potential race condition between create and refetch. Consider using the mutation's onSuccess callback instead of awaiting both operations
suggested fix
mutationFn: async (address: Terminal.AddressCreateParams) => {
+ return terminal.address.create(address);
},
+ onSuccess: () => refetch(),
diff block
+import { queryOptions, useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
+import { useTerminal } from "./auth";
+import type { Terminal } from "@terminaldotshop/sdk";
+import { useCart } from "./cart";
+
+const addressesOptions = (terminal: Terminal) => {
+ return queryOptions({
+ queryKey: ["addresses"],
+ queryFn: async () => {
+ return terminal.address.list().then((r) => r.data);
+ },
+ initialData: [],
+ });
+};
+
+export const useAddress = (id?: string) => {
+ const terminal = useTerminal();
+ return useQuery({
+ ...addressesOptions(terminal),
+ select: (addresses) => addresses.find((a) => a.id === id),
+ });
+};
+
+export const useAddresses = () => {
+ const terminal = useTerminal();
+ return useQuery(addressesOptions(terminal));
+};
+
+export const useCreateAddress = () => {
+ const terminal = useTerminal();
+ const { refetch } = useAddresses();
+ return useMutation({
+ mutationFn: async (address: Terminal.AddressCreateParams) => {
+ await terminal.address.create(address);
+ await refetch();
+ },
+ });
+};
+
+export const useSetAddress = () => {
+ const terminal = useTerminal();
+ const qc = useQueryClient();
+ const { data: cart, refetch } = useCart();
+ return useMutation({
+ mutationFn: async (addressID: string) => {
+ await terminal.cart.setAddress({ addressID });
+ qc.setQueryData(["cart"], { ...cart, addressID });
+ await refetch();
+ },
greptile
style: Similar race condition risk between setAddress, setQueryData and refetch. Consider using optimistic updates pattern instead
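A minimal sketch of the optimistic-update shape that TanStack Query documents (onMutate/onError/onSettled), assuming the cart is cached under the ["cart"] key as in these hooks:

export const useSetAddress = () => {
  const terminal = useTerminal();
  const qc = useQueryClient();
  return useMutation({
    mutationFn: (addressID: string) => terminal.cart.setAddress({ addressID }),
    onMutate: async (addressID) => {
      await qc.cancelQueries({ queryKey: ["cart"] }); // stop in-flight cart refetches
      const previous = qc.getQueryData(["cart"]);     // snapshot for rollback
      qc.setQueryData(["cart"], (old: object | undefined) => ({ ...old, addressID }));
      return { previous };
    },
    onError: (_err, _addressID, ctx) => {
      qc.setQueryData(["cart"], ctx?.previous);       // roll back the optimistic write
    },
    onSettled: () => qc.invalidateQueries({ queryKey: ["cart"] }), // reconcile with the server
  });
};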
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs occur simultaneously due to shared changedFiles state.
diff block
+import type { User } from "@/types";
+import { createContext, useEffect, useMemo, useState } from "react";
+import { mockUser } from "./mock/sidebar-mock";
+
+interface SidebarContext {
+ user: User | null;
+ isResizing: boolean;
+ isCollapsed: boolean;
+ currentWidth: number;
+ stopResizing: () => void;
+ startResizing: () => void;
+ setIsCollapsed: (isCollapsed: boolean) => void;
+ setCurrentWidth: (currentWidth: number) => void;
+ sidebarSizes: {
+ MIN: number;
+ MAX: number;
+ DEFAULT: number;
+ COLLAPSED: number;
+ };
+}
+
+const SIDEBAR_SIZES = {
+ MIN: 200,
+ MAX: 400,
+ DEFAULT: 250,
+ COLLAPSED: 72,
+} as const;
+
+export const SidebarContext = createContext<SidebarContext>({
+ user: null,
+ isResizing: false,
+ isCollapsed: false,
+ stopResizing: () => {},
+ startResizing: () => {},
+ setIsCollapsed: () => {},
+ setCurrentWidth: () => {},
+ sidebarSizes: SIDEBAR_SIZES,
+ currentWidth: SIDEBAR_SIZES.DEFAULT,
+});
+
+export function SidebarProvider({
+ children,
+}: Readonly<{ children: React.ReactNode }>) {
+ const [isResizing, setIsResizing] = useState<boolean>(false);
+ const [isCollapsed, setIsCollapsed] = useState<boolean>(false);
+ const [currentWidth, setCurrentWidth] = useState<number>(
+ SIDEBAR_SIZES.DEFAULT
+ );
+ const user = mockUser;
+
+ const startResizing = () => {
+ setIsResizing(true);
+ document.body.classList.add("sidebar-resizing");
+ };
+
+ const stopResizing = () => {
+ setIsResizing(false);
+ document.body.classList.remove("sidebar-resizing");
+ };
+
+ const handleResize = (e: MouseEvent) => {
+ if (!isResizing) return;
+
+ if (e.clientX < SIDEBAR_SIZES.MIN / 2) {
+ setIsCollapsed(true);
+ } else if (e.clientX > SIDEBAR_SIZES.MIN) {
+ setIsCollapsed(false);
+ }
greptile
style: Potential race condition between setIsCollapsed and setCurrentWidth. Consider batching these state updates
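A sketch of one way to batch them, assuming the same SIDEBAR_SIZES constants; the width-from-clientX rule is only an illustration, since the original width logic is not shown. Keeping collapse state and width in one state object means a resize tick can never observe one updated without the other.

const [layout, setLayout] = useState({
  isCollapsed: false,
  currentWidth: SIDEBAR_SIZES.DEFAULT,
});

const handleResize = (e: MouseEvent) => {
  if (!isResizing) return;
  setLayout((prev) => {
    // One functional update per mouse move: collapse and width always change together.
    if (e.clientX < SIDEBAR_SIZES.MIN / 2) return { ...prev, isCollapsed: true };
    if (e.clientX > SIDEBAR_SIZES.MIN) return { ...prev, isCollapsed: false, currentWidth: e.clientX };
    return prev;
  });
};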
diff block
linksList.value = mainLayoutMixin
.setup()
.leftNavigationLinks(linksList, t);
- filterMenus();
}
+
store.dispatch("setConfig", res.data);
await nextTick();
+ filterMenus();
+
+ updateActionsMenu();
greptile
logic: The relative order of filterMenus() and updateActionsMenu() may cause race conditions. Consider combining these operations
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs execute simultaneously. The null check isn't thread-safe.
diff block
input: &serde_json::value::RawValue,
unique: bool,
) -> WorkflowResult<Uuid> {
+ // TODO: Race condition if two unique dispatch_workflow calls are made at the same time. The txn
+ // inside of find_workflow should be split into find_workflow_inner and run in the same txn in this
+ // function
if unique {
greptile
logic: Race condition in unique workflow dispatch needs to be fixed by combining find_workflow and dispatch_workflow transactions
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple calls occur before changedFiles is populated. Consider adding a lock mechanism.
diff block
public async storeSessionConsoleLogs(logs: ConsoleLogEntry[]): Promise<void> {
if (logs.length === 0 || !this.topic) {
- return
+ return Promise.resolve()
}
- await this.producer.queueMessages({
- topic: this.topic,
- messages: logs.map((log) => ({
- value: JSON.stringify(log),
- key: log.log_source_id, // Using session_id as the key for partitioning
- })),
- })
+ if (this.pendingPromises.length >= this.promiseLimit) {
+ this.syncPromise = this.sync()
+ }
+
+ if (this.syncPromise) {
+ await this.syncPromise
+ return this.storeSessionConsoleLogs(logs)
+ }
+
+ this.pendingPromises.push(
+ this.producer.queueMessages({
+ topic: this.topic,
+ messages: logs.map((log) => ({
+ value: JSON.stringify(log),
+ key: log.log_source_id,
+ })),
+ })
+ )
this.consoleLogsCount += logs.length
logger.debug(`stored ${logs.length} console logs for session ${logs[0].log_source_id}`)
SessionBatchMetrics.incrementConsoleLogsStored(logs.length)
+ return Promise.resolve()
}
public async flush(): Promise<void> {
+ if (this.syncPromise) {
+ await this.syncPromise
+ return this.flush()
+ } else {
+ await this.sync()
+ }
+
logger.info(`flushing ${this.consoleLogsCount} console logs`)
await this.producer.flush()
this.consoleLogsCount = 0
}
+
+ private async sync(): Promise<void> {
+ if (this.syncPromise) {
+ return this.syncPromise
+ }
greptile
logic: Potential race condition: if syncPromise is set between the check and return, this could lead to multiple concurrent syncs. Consider using a local variable to store syncPromise.
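A sketch of the local-variable idea, assuming pendingPromises holds the queued queueMessages calls as in the class above and that sync() is the only writer of syncPromise: capture and assign the promise before any await so concurrent callers always observe the same one.

private sync(): Promise<void> {
    let current = this.syncPromise
    if (!current) {
        // Assign synchronously, before any await, so a concurrent caller reuses this promise.
        current = Promise.all(this.pendingPromises)
            .then(() => undefined)
            .finally(() => {
                this.pendingPromises = []
                this.syncPromise = null
            })
        this.syncPromise = current
    }
    return current
}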
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple runs occur simultaneously due to shared changedFiles state. Consider passing state as parameters instead.
diff block
+// Copyright 2025 OpenObserve Inc.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+use std::sync::Arc;
+
+use config::RwAHashMap;
+
+use super::{
+ config::*,
+ connection::{Connection, QuerierConnection},
+ error::*,
+ handler::QuerierName,
+};
+
+#[derive(Debug)]
+pub struct QuerierConnectionPool {
+ connections: RwAHashMap<QuerierName, Arc<QuerierConnection>>,
+ config: WsConfig,
+}
+
+impl QuerierConnectionPool {
+ pub fn new(config: WsConfig) -> Self {
+ Self {
+ connections: RwAHashMap::default(),
+ config,
+ }
+ }
+
+ pub async fn get_or_create_connection(
+ &self,
+ querier_name: &QuerierName,
+ ) -> WsResult<Arc<QuerierConnection>> {
+ if let Some(conn) = self.connections.read().await.get(querier_name) {
+ // double check if the connection is still connected
+ return if conn.is_connected().await {
+ Ok(conn.clone())
+ } else {
+ Err(WsError::ConnectionDisconnected)
+ };
+ }
+
+ // Create new connection
+ let conn = super::connection::create_connection(querier_name).await?;
+ self.connections
+ .write()
+ .await
+ .insert(querier_name.to_string(), conn.clone());
+ Ok(conn)
+ }
greptile
logic: Potential race condition in get_or_create_connection between read check and write insert. Another thread could create the same connection between the read and write operations. Consider using a single write lock operation or implementing proper synchronization.
diff block
setUserPrompt("");
// React sucks and doesn't get the latest state
- await persistChat({ ...localChat, title, messages: finalMessages });
+ // Use current title from localChat which may have been updated asynchronously
+ const currentTitle = localChat.title === "New Chat" ? title : localChat.title;
+ await persistChat({ ...localChat, title: currentTitle, messages: finalMessages });
greptile
logic: Race condition possible here - localChat.title might not reflect latest state due to React's batching. Use a ref to track latest title.
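A sketch of the ref approach, assuming localChat, title, and persistChat as in the component above: mirror the latest title into a ref so the async persist reads the current value rather than a stale closure.

const titleRef = useRef(localChat.title);
useEffect(() => {
  titleRef.current = localChat.title; // always holds the most recent title
}, [localChat.title]);

// ...later, inside the async submit handler:
const currentTitle = titleRef.current === "New Chat" ? title : titleRef.current;
await persistChat({ ...localChat, title: currentTitle, messages: finalMessages });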
diff block
+import { useState, useEffect, useMemo } from "react";
+import { List, ActionPanel, Action, Icon, Toast, showToast, Color, useNavigation } from "@raycast/api";
+import { MarketplaceService, GCPService } from "./ServiceHubService";
+import ServiceDetails from "./components/ServiceDetails";
+import { GCPServiceCategory } from "../../utils/gcpServices";
+
+interface ViewProps {
+ projectId: string;
+ gcloudPath: string;
+}
+
+export default function ServiceHubView({ projectId, gcloudPath }: ViewProps) {
+ const [services, setServices] = useState<GCPService[]>([]);
+ const [searchText, setSearchText] = useState("");
+ const [error, setError] = useState<string | null>(null);
+ const [isLoading, setIsLoading] = useState(true);
+ const [isRefreshing, setIsRefreshing] = useState(false);
+ const [selectedCategory, setSelectedCategory] = useState<string>("all");
+ const [categories, setCategories] = useState<string[]>([]);
+ const { push } = useNavigation();
+ const [showOnlyEnabled, setShowOnlyEnabled] = useState(false);
+ const [showCoreServicesOnly, setShowCoreServicesOnly] = useState(true);
+
+ // Service initialization
+ const serviceHub = useMemo(() => new MarketplaceService(gcloudPath, projectId), [gcloudPath, projectId]);
+
+ // Fetch categories on component mount
+ useEffect(() => {
+ fetchCategories();
+ }, []);
+
+ // Fetch services when category or filter changes
+ useEffect(() => {
+ // First load with local data for instant UI
+ fetchLocalServices();
+ // Then fetch from API
+ fetchServices();
+ }, [selectedCategory, showCoreServicesOnly]);
greptile
logic: fetchLocalServices and fetchServices are both called on mount and filter changes, which could cause race conditions. Consider using a single fetch operation with a loading state.
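A sketch of collapsing the two fetches into one effect with a staleness guard; the listLocalServices/listServices method names are hypothetical stand-ins for whatever MarketplaceService actually exposes.

useEffect(() => {
  let stale = false;
  const load = async () => {
    setIsLoading(true);
    const local = await serviceHub.listLocalServices(selectedCategory); // hypothetical local/offline read
    if (!stale) setServices(local);                                     // instant UI from local data
    const remote = await serviceHub.listServices(selectedCategory);     // hypothetical API call
    if (!stale) {
      setServices(remote);                                              // only the latest filter run wins
      setIsLoading(false);
    }
  };
  void load();
  return () => {
    stale = true; // a newer category/filter change supersedes this run
  };
}, [selectedCategory, showCoreServicesOnly, serviceHub]);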
diff block
+import { ActionPanel, Action, List, Icon, useNavigation, showToast, Toast, Color, Cache } from "@raycast/api";
+import { useState, useEffect } from "react";
+import { CacheManager, Project } from "../utils/CacheManager";
+import ProjectView from "../ProjectView";
+import { executeGcloudCommand } from "../gcloud";
+
+interface CachedProjectViewProps {
+ gcloudPath: string;
+ onLoginWithDifferentAccount?: () => void;
+}
+
+interface GCloudProject {
+ projectId: string;
+ name: string;
+ projectNumber: string;
+ createTime: string;
+}
+
+interface ProjectDetails {
+ projectId: string;
+ name: string;
+ projectNumber: string;
+ createTime: string;
+}
+
+// Create a navigation cache instance
+const navigationCache = new Cache({ namespace: "navigation-state" });
+// Create a settings cache instance
+const settingsCache = new Cache({ namespace: "settings" });
+
+export default function CachedProjectView({ gcloudPath, onLoginWithDifferentAccount }: CachedProjectViewProps) {
+ const [isLoading, setIsLoading] = useState(true);
+ const [cachedProject, setCachedProject] = useState<{ projectId: string; timestamp: number } | null>(null);
+ const [projectDetails, setProjectDetails] = useState<ProjectDetails | null>(null);
+ const [recentlyUsedProjects, setRecentlyUsedProjects] = useState<Project[]>([]);
+ const [error, setError] = useState<string | null>(null);
+ const [shouldNavigate, setShouldNavigate] = useState<{ action: string; projectId?: string } | null>(null);
+ const [cacheLimit, setCacheLimit] = useState<number>(1);
+ const [authCacheDuration, setAuthCacheDuration] = useState<number>(24);
+ const { pop, push } = useNavigation();
+
+ // Define initialize function that can be called from anywhere in the component
+ async function initialize() {
+ setIsLoading(true);
+ try {
+ const loadingToast = await showToast({
+ style: Toast.Style.Animated,
+ title: "Loading cached project...",
+ message: "Retrieving your last used project",
+ });
+
+ // Get current cache limit
+ const limit = CacheManager.getCacheLimit();
+ setCacheLimit(limit);
+
+ // Ensure recently used projects list respects the cache limit
+ CacheManager.syncRecentlyUsedProjectsWithCacheLimit();
+
+ // Get cached project
+ const cached = CacheManager.getSelectedProject();
+ setCachedProject(cached);
+
+ if (cached) {
+ try {
+ // Try to get project details using the helper method
+ const details = await CacheManager.getProjectDetails(cached.projectId, gcloudPath);
+ if (details) {
+ setProjectDetails({
+ projectId: details.id,
+ name: details.name,
+ projectNumber: details.projectNumber,
+ createTime: details.createTime,
+ });
+ }
+ } catch (error) {
+ console.error("Error fetching cached project details:", error);
+ }
+ }
+
+ // Get exactly the number of recently used projects that matches the cache limit
+ const recentProjects = await CacheManager.getRecentlyUsedProjectsWithDetails(gcloudPath);
+ console.log("Fetched recent projects:", recentProjects);
+
+ // Ensure we show exactly the number of projects configured in the cache limit
+ setRecentlyUsedProjects(recentProjects.slice(0, limit));
+
+ // Get all projects to keep them cached for when user selects "Browse All Projects"
+ try {
+ const result = await executeGcloudCommand(gcloudPath, "projects list --format=json");
+ if (Array.isArray(result) && result.length > 0) {
+ const allProjects = result.map((project: GCloudProject) => ({
+ id: project.projectId,
+ name: project.name || project.projectId,
+ projectNumber: project.projectNumber || "",
+ createTime: project.createTime || new Date().toISOString(),
+ }));
+ CacheManager.saveProjectsList(allProjects);
+ }
+ } catch (error) {
+ console.error("Error fetching all projects:", error);
+ }
+
+ loadingToast.hide();
+ } catch (error) {
+ console.error("Error initializing cached project view:", error);
+ setError("Failed to load cached project");
+
+ showToast({
+ style: Toast.Style.Failure,
+ title: "Failed to load cached project",
+ message: error instanceof Error ? error.message : String(error),
+ });
+ } finally {
+ setIsLoading(false);
+ }
+ }
+
+ useEffect(() => {
+ initialize();
+ }, [gcloudPath]);
+
+ // Load settings from cache on component mount
+ useEffect(() => {
+ // Load cache limit setting
+ const cachedLimit = settingsCache.get("cache-limit");
+ if (cachedLimit) {
+ setCacheLimit(parseInt(cachedLimit, 10));
+ }
+
+ // Load auth cache duration setting
+ const cachedAuthDuration = settingsCache.get("auth-cache-duration");
+ if (cachedAuthDuration) {
+ setAuthCacheDuration(parseInt(cachedAuthDuration, 10));
+ }
+ }, []);
+
+ // Handle navigation with useEffect
+ useEffect(() => {
+ if (!shouldNavigate) return;
+
+ const performNavigation = async () => {
+ try {
+ let loadingToast: Toast | null = null;
+
+ if (shouldNavigate.action === "continue" && cachedProject) {
+ if (!cachedProject.projectId || typeof cachedProject.projectId !== "string") {
+ throw new Error("Invalid cached project ID");
+ }
+
+ loadingToast = await showToast({
+ style: Toast.Style.Animated,
+ title: "Opening project...",
+ message: cachedProject.projectId,
+ });
+
+ // Short delay to show the toast before navigation
+ setTimeout(() => {
+ loadingToast?.hide();
+ push(<ProjectView projectId={cachedProject.projectId} gcloudPath={gcloudPath} />);
+ }, 500);
+ } else if (shouldNavigate.action === "select" && shouldNavigate.projectId) {
greptile
style: Using setTimeout for navigation could cause race conditions if user performs multiple actions quickly
diff block
+import { createContext, useContext, useEffect, useState, PropsWithChildren, FC, useLayoutEffect } from 'react'
+import type { TamaguiThemeTypes } from 'electron/main/electron-store/storeConfig'
+import { TamaguiProvider } from 'tamagui'
+import tamaguiConfig from '../../tamagui.config'
+import { themes } from '@/components/Editor/ui/src/tamagui/themes/theme'
+
+interface ThemeActions {
+ toggle: () => void
+ set: (theme: TamaguiThemeTypes) => void
+ syncWithSystem: () => void
+}
+
+export interface ThemeContextValue {
+ state: TamaguiThemeTypes
+ actions: ThemeActions
+}
+
+const ThemeContext = createContext<ThemeContextValue | null>(null)
+
+export class ThemeManager {
+ private state: TamaguiThemeTypes
+ private setState: (theme: TamaguiThemeTypes) => void
+
+ constructor(initialTheme: TamaguiThemeTypes, setState: (theme: TamaguiThemeTypes) => void) {
+ this.state = initialTheme
+ this.setState = setState
+ }
+
+ private async updateTheme(newTheme: TamaguiThemeTypes) {
+ this.state = newTheme
+ this.setState(newTheme)
+ await window.electronStore.setTamaguiTheme(newTheme)
+ }
+
+ getContextValue(): ThemeContextValue {
+ return {
+ state: this.state,
+ actions: {
+ toggle: () => {
+ const newTheme = this.state === 'light' ? 'dark' : 'light'
+ this.updateTheme(newTheme)
+ },
+ set: (theme: TamaguiThemeTypes) => {
+ this.updateTheme(theme)
+ },
+ syncWithSystem: () => {
+ const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches
+ this.updateTheme(prefersDark ? 'dark' : 'light')
+ },
+ },
+ }
+ }
+}
+
+/**
+ * Stores, gets, and updates the theme
+ */
+export const ThemeProvider: FC<PropsWithChildren> = ({ children }) => {
+ const [theme, setTheme] = useState<TamaguiThemeTypes>('light')
+ const [manager, setManager] = useState<ThemeManager | null>(null)
+
+ useEffect(() => {
+ const initTheme = async () => {
+ const savedTheme = await window.electronStore.getTamaguiTheme()
+ console.log(`Fetched theme from store: ${savedTheme}`)
+ setTheme(savedTheme || 'light')
+ setManager(new ThemeManager(savedTheme || 'light', setTheme))
+ }
+
+ initTheme()
+ }, [])
greptile
style: Race condition possible between theme fetch and manager initialization. Consider using a loading state instead of null check.
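A sketch of the loading-flag variant, assuming window.electronStore.getTamaguiTheme() resolves to a TamaguiThemeTypes value as in the provider above:

const [isThemeReady, setIsThemeReady] = useState(false)

useEffect(() => {
  let cancelled = false
  window.electronStore.getTamaguiTheme().then((savedTheme) => {
    if (cancelled) return
    const initial = savedTheme || 'light'
    setTheme(initial)
    setManager(new ThemeManager(initial, setTheme))
    setIsThemeReady(true) // theme and manager are now guaranteed to be consistent
  })
  return () => {
    cancelled = true
  }
}, [])

if (!isThemeReady) return null // or a splash screen while the stored theme loads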
diff block
+import { mergeAttributes, Node } from '@tiptap/core'
+import { Fragment, Node as PMNode, Slice } from 'prosemirror-model'
+import { EditorState, NodeSelection, Plugin, PluginKey, TextSelection } from 'prosemirror-state'
+import { Decoration, DecorationSet } from 'prosemirror-view'
+import { blockToNode, inlineContentToNodes } from '../../../api/nodeConversions/nodeConversions'
+
+import { BlockChildrenType } from '../api/blockTypes'
+import { ResolvedPos } from '@tiptap/pm/model'
+import { EditorView } from '@tiptap/pm/view'
+import { mergeCSSClasses } from '../../../shared/utils'
+import { BlockNoteDOMAttributes, BlockSchema, PartialBlock } from '../api/blockTypes'
+import { getBlockInfoFromPos } from '../helpers/getBlockInfoFromPos'
+import { getGroupInfoFromPos } from '../helpers/getGroupInfoFromPos'
+import styles from './Block.module.css'
+import BlockAttributes from './BlockAttributes'
+
+const SelectionPluginKey = new PluginKey('selectionPluginKey')
+const ClickSelectionPluginKey = new PluginKey('clickSelectionPluginKey')
+const PastePluginKey = new PluginKey('pastePluginKey')
+const headingLinePluginKey = new PluginKey('HeadingLinePlugin')
+
+const SelectionPlugin = new Plugin({
+ key: SelectionPluginKey,
+ state: {
+ init() {
+ return DecorationSet.empty
+ },
+ apply(tr, oldState) {
+ return tr.getMeta(SelectionPluginKey) || oldState
+ },
+ },
+ props: {
+ decorations(state) {
+ return this.getState(state)
+ },
+ },
+})
+
+const ClickSelectionPlugin = new Plugin({
+ key: ClickSelectionPluginKey,
+ props: {
+ handleDOMEvents: {
+ mousedown(view, event) {
+ if (event.shiftKey && event.button === 0) {
+ const { state } = view
+ const editorBoundingBox = (view.dom.firstChild! as HTMLElement).getBoundingClientRect()
+ const coords = {
+ left: editorBoundingBox.left + editorBoundingBox.width / 2, // take middle of editor
+ top: event.clientY,
+ }
+ let pos = view.posAtCoords(coords)
+ if (!pos) {
+ return undefined
+ }
+ const { selection } = state
+ const selectedPos = state.doc.resolve(selection.from)
+ const nodePos = state.doc.resolve(pos.pos)
+ if (selectedPos.start() === selection.from && pos.pos === nodePos.end()) {
+ const decoration = Decoration.widget(nodePos.pos, () => {
+ const span = document.createElement('span')
+ span.style.backgroundColor = 'blue'
+ span.style.width = '10px'
+ span.style.height = '10px'
+ return span
+ })
+ const decorationSet = DecorationSet.create(state.doc, [decoration])
+ view.dispatch(state.tr.setMeta(SelectionPluginKey, decorationSet))
+ }
+ return false
+ }
+ return false
+ },
+ },
+ },
+})
+
+const PastePlugin = new Plugin({
+ key: PastePluginKey,
+ props: {
+ handlePaste: (view, event) => {
+ if (!event.clipboardData) {
+ return false
+ }
+
+ const { state } = view
+ let { tr } = state
+ const { selection } = state
+ const { $from, $to } = selection
+
+ const targetNode = state.doc.resolve($from.pos).parent
+
+ if (targetNode.type.name === 'image') {
+ tr = tr.insertText(event.clipboardData.getData('text/plain'), $from.pos, $to.pos)
+ view.dispatch(tr)
+ return true
+ }
+
+ return false
+ },
+ },
+})
+
+const headingLinePlugin = new Plugin({
+ key: headingLinePluginKey,
+ view(editorView) {
+ return new HeadingLinePlugin(editorView)
+ },
+})
+
+class HeadingLinePlugin {
+ private line: HTMLElement
+ constructor(view: EditorView) {
+ this.line = document.createElement('div')
+ this.line.style.transition = 'all 0.15s ease-in-out'
+ this.line.style.pointerEvents = 'none'
+ this.line.style.display = ''
+ this.line.style.opacity = '0'
+ view.dom.parentNode?.appendChild(this.line)
+
+ this.update(view, null)
+ }
+
+ update(view: EditorView, lastState: EditorState | null) {
+ let state = view.state
+ // Don't do anything if the document/selection didn't change
+ if (lastState && lastState.doc.eq(state.doc) && lastState.selection.eq(state.selection)) return
+
+ let res = getNearestHeadingFromPos(state, state.selection.from)
+
+ if (res && res.heading?.type.name === 'heading') {
+ let { node } = view.domAtPos(res.groupStartPos)
+
+ let rect = (node as HTMLElement).getBoundingClientRect()
+ let editorRect = view.dom.getBoundingClientRect()
+ let groupPadding = 10
+ let editorPaddingTop = 40
+ this.line.style.position = 'absolute'
+ this.line.style.top = `${rect.top + editorPaddingTop + groupPadding - editorRect.top}px`
+ this.line.style.left = `${rect.left - editorRect.left + groupPadding}px`
+ this.line.style.width = `2.5px`
+ this.line.style.height = `${rect.height - groupPadding * 2}px`
+ this.line.style.backgroundColor = 'var(--brand5)'
+ this.line.style.opacity = '0.4'
+ } else {
+ this.line.style.opacity = '0'
+ return
+ }
+ }
+
+ destroy() {
+ this.line.remove()
+ }
+}
+
+function getNearestHeadingFromPos(state: EditorState, pos: number) {
+ const $pos = state.doc.resolve(pos)
+ const maxDepth = $pos.depth
+ let group = $pos.node(maxDepth)
+ let heading = group.firstChild
+ let depth = maxDepth
+
+ if (maxDepth > 3) {
+ while (true) {
+ if (depth < 0) {
+ break
+ }
+
+ if (group.type.name === 'blockContainer' && heading?.type.name === 'heading') {
+ break
+ }
+
+ depth -= 1
+ group = $pos.node(depth)
+ heading = group.firstChild
+ }
+ return {
+ depth,
+ groupStartPos: $pos.start(depth),
+ heading,
+ group,
+ $pos,
+ }
+ }
+
+ return
+}
+
+export function getParentBlockFromPos(state: EditorState, pos: number) {
+ const $pos = state.doc.resolve(pos)
+ const depth = $pos.depth
+
+ // if (depth > 3 && container.type.name == 'blockContainer') {
+ if (depth > 3) {
+ let parent = $pos.node(depth - 3)
+ let parentGroup = $pos.node(depth - 2)
+ let parentPos = $pos.start(depth - 3)
+ return {
+ parentGroup,
+ parentBlock: parent.firstChild,
+ parentPos,
+ depth,
+ $pos,
+ }
+ }
+
+ return
+}
+declare module '@tiptap/core' {
+ interface Commands<ReturnType> {
+ block: {
+ BNCreateBlock: (pos: number) => ReturnType
+ BNDeleteBlock: (posInBlock: number) => ReturnType
+ BNMergeBlocks: (posBetweenBlocks: number) => ReturnType
+ BNSplitBlock: (posInBlock: number, keepType: boolean) => ReturnType
+ BNSplitHeadingBlock: (posInBlock: number) => ReturnType
+ BNUpdateBlock: <BSchema extends BlockSchema>(posInBlock: number, block: PartialBlock<BSchema>) => ReturnType
+ BNCreateOrUpdateBlock: <BSchema extends BlockSchema>(
+ posInBlock: number,
+ block: PartialBlock<BSchema>,
+ ) => ReturnType
+ UpdateGroupChildren: (
+ group: PMNode,
+ groupPos: ResolvedPos,
+ groupLevel: number,
+ listType: BlockChildrenType,
+ indent: number,
+ ) => ReturnType
+ UpdateGroup: (
+ posInBlock: number,
+ listType: BlockChildrenType,
+ tab: boolean,
+ // start?: string,
+ isSank?: boolean,
+ turnInto?: boolean,
+ ) => ReturnType
+ }
+ }
+}
+
+/**
+ * The main "Block node" documents consist of
+ */
+export const BlockContainer = Node.create<{
+ domAttributes?: BlockNoteDOMAttributes
+}>({
+ name: 'blockContainer',
+ group: 'blockContainer',
+ // A block always contains content, and optionally a blockGroup which contains nested blocks
+ content: 'blockContent blockGroup?',
+ // Ensures content-specific keyboard handlers trigger first.
+ priority: 50,
+ defining: true,
+
+ parseHTML() {
+ return [
+ {
+ tag: 'div',
+ getAttrs: (element) => {
+ if (typeof element === 'string') {
+ return false
+ }
+
+ const attrs: Record<string, string> = {}
+ for (const [nodeAttr, HTMLAttr] of Object.entries(BlockAttributes)) {
+ if (element.getAttribute(HTMLAttr)) {
+ attrs[nodeAttr] = element.getAttribute(HTMLAttr)!
+ }
+ }
+
+ if (element.getAttribute('data-node-type') === 'blockContainer') {
+ return attrs
+ }
+
+ return false
+ },
+ },
+ ]
+ },
+
+ renderHTML({ HTMLAttributes }) {
+ const domAttributes = this.options.domAttributes?.blockContainer || {}
+
+ return [
+ 'div',
+ mergeAttributes(HTMLAttributes, {
+ class: styles.blockOuter,
+ 'data-node-type': 'block-outer',
+ }),
+ [
+ 'div',
+ mergeAttributes(
+ {
+ ...domAttributes,
+ class: mergeCSSClasses(styles.block, domAttributes.class),
+ 'data-node-type': this.name,
+ },
+ HTMLAttributes,
+ ),
+ 0,
+ ],
+ ]
+ },
+
+ addCommands() {
+ return {
+ // Creates a new text block at a given position.
+ BNCreateBlock:
+ (pos) =>
+ ({ state, dispatch }) => {
+ const newBlock = state.schema.nodes['blockContainer'].createAndFill()!
+
+ if (dispatch) {
+ state.tr.insert(pos, newBlock)
+ }
+
+ return true
+ },
+ // Deletes a block at a given position.
+ BNDeleteBlock:
+ (posInBlock) =>
+ ({ state, dispatch }) => {
+ const blockInfo = getBlockInfoFromPos(state.doc, posInBlock)
+ if (blockInfo === undefined) {
+ return false
+ }
+
+ const { startPos, endPos } = blockInfo
+
+ if (dispatch) {
+ state.tr.deleteRange(startPos, endPos)
+ }
+
+ return true
+ },
+ // Updates a block at a given position.
+ BNUpdateBlock:
+ (posInBlock, block) =>
+ ({ state, dispatch }) => {
+ const blockInfo = getBlockInfoFromPos(state.doc, posInBlock)
+ if (blockInfo === undefined) {
+ return false
+ }
+
+ const { startPos, endPos, node, contentNode } = blockInfo
+
+ if (dispatch) {
+ // Adds blockGroup node with child blocks if necessary.
+ if (block.children !== undefined && block.children.length > 0) {
+ const childNodes = []
+
+ // Creates ProseMirror nodes for each child block, including their descendants.
+ for (const child of block.children) {
+ childNodes.push(blockToNode(child, state.schema))
+ }
+
+ // Checks if a blockGroup node already exists.
+ if (node.childCount === 2) {
+ // Replaces all child nodes in the existing blockGroup with the ones created earlier.
+ state.tr.replace(
+ startPos + contentNode.nodeSize + 1,
+ endPos - 1,
+ new Slice(Fragment.from(childNodes), 0, 0),
+ )
+ } else {
+ // Inserts a new blockGroup containing the child nodes created earlier.
+ state.tr.insert(
+ startPos + contentNode.nodeSize,
+ state.schema.nodes['blockGroup'].create({}, childNodes),
+ )
+ }
+ }
+
+ // Replaces the blockContent node's content if necessary.
+ if (block.content !== undefined) {
+ let content: PMNode[] = []
+
+ // Checks if the provided content is a string or InlineContent[] type.
+ if (typeof block.content === 'string') {
+ // Adds a single text node with no marks to the content.
+ content.push(state.schema.text(block.content))
+ } else {
+ // Adds a text node with the provided styles converted into marks to the content, for each InlineContent
+ // object.
+ content = inlineContentToNodes(block.content, state.schema)
+ }
+
+ // Replaces the contents of the blockContent node with the previously created text node(s).
+ state.tr.replace(
+ startPos + 1,
+ startPos + contentNode.nodeSize - 1,
+ new Slice(Fragment.from(content), 0, 0),
+ )
+ }
+
+ // Changes the blockContent node type and adds the provided props as attributes. Also preserves all existing
+ // attributes that are compatible with the new type.
+ state.tr.setNodeMarkup(startPos, block.type === undefined ? undefined : state.schema.nodes[block.type], {
+ ...contentNode.attrs,
+ ...block.props,
+ })
+
+ // Adds all provided props as attributes to the parent blockContainer node too, and also preserves existing
+ // attributes.
+ let providedProps = {
+ ...node.attrs,
+ ...block.props,
+ }
+ state.tr.setNodeMarkup(startPos - 1, undefined, providedProps)
+ }
+
+ return true
+ },
+ // Appends the text contents of a block to the nearest previous block, given a position between them. Children of
+ // the merged block are moved out of it first, rather than also being merged.
+ //
+ // In the example below, the position passed into the function is between Block1 and Block2.
+ //
+ // Block1
+ // Block2
+ // Block3
+ // Block4
+ // Block5
+ //
+ // Becomes:
+ //
+ // Block1
+ // Block2Block3
+ // Block4
+ // Block5
+ BNMergeBlocks:
+ (posBetweenBlocks) =>
+ ({ state, dispatch }) => {
+ const nextNodeIsBlock = state.doc.resolve(posBetweenBlocks + 1).node().type.name === 'blockContainer'
+ const prevNodeIsBlock = state.doc.resolve(posBetweenBlocks - 1).node().type.name === 'blockContainer'
+
+ if (!nextNodeIsBlock || !prevNodeIsBlock) {
+ return false
+ }
+
+ const nextBlockInfo = getBlockInfoFromPos(state.doc, posBetweenBlocks + 1)
+
+ const { node, contentNode, startPos, endPos, depth } = nextBlockInfo!
+
+ // Removes a level of nesting all children of the next block by 1 level, if it contains both content and block
+ // group nodes.
+ if (node.childCount === 2) {
+ const childBlocksStart = state.doc.resolve(startPos + contentNode.nodeSize + 1)
+ const childBlocksEnd = state.doc.resolve(endPos - 1)
+ const childBlocksRange = childBlocksStart.blockRange(childBlocksEnd)
+
+ // Moves the block group node inside the block into the block group node that the current block is in.
+ if (dispatch) {
+ state.tr.lift(childBlocksRange!, depth - 1)
+ }
+ }
+
+ let prevBlockEndPos = posBetweenBlocks - 1
+ let prevBlockInfo = getBlockInfoFromPos(state.doc, prevBlockEndPos)
+
+ // Finds the nearest previous block, regardless of nesting level.
+ while (prevBlockInfo!.numChildBlocks > 0) {
+ prevBlockEndPos--
+ prevBlockInfo = getBlockInfoFromPos(state.doc, prevBlockEndPos)
+ if (prevBlockInfo === undefined) {
+ return false
+ }
+ }
+
+ // Deletes next block and adds its text content to the nearest previous block.
+
+ if (dispatch) {
+ dispatch(
+ state.tr
+ .deleteRange(startPos, startPos + contentNode.nodeSize)
+ .replace(prevBlockEndPos - 1, startPos, new Slice(contentNode.content, 0, 0))
+ .scrollIntoView(),
+ )
+
+ state.tr.setSelection(new TextSelection(state.doc.resolve(prevBlockEndPos - 1)))
+ }
+
+ return true
+ },
+ // Splits a block at a given position. Content after the position is moved to a new block below, at the same
+ // nesting level.
+ BNSplitBlock:
+ (posInBlock, keepType) =>
+ ({ state, dispatch }) => {
+ const blockInfo = getBlockInfoFromPos(state.doc, posInBlock)
+ if (blockInfo === undefined) {
+ return false
+ }
+
+ const { contentNode, contentType, startPos, endPos, depth } = blockInfo
+
+ const originalBlockContent = state.doc.cut(startPos + 1, posInBlock)
+ const newBlockContent = state.doc.cut(posInBlock, endPos - 1)
+
+ const newBlock = state.schema.nodes['blockContainer'].createAndFill()!
+
+ const newBlockInsertionPos = endPos + 1
+ const newBlockContentPos = newBlockInsertionPos + 2
+
+ if (dispatch) {
+ // Creates a new block. Since the schema requires it to have a content node, a paragraph node is created
+ // automatically, spanning newBlockContentPos to newBlockContentPos + 1.
+ state.tr.insert(newBlockInsertionPos, newBlock)
+
+ // Replaces the content of the newly created block's content node. Doesn't replace the whole content node so
+ // its type doesn't change.
+ state.tr.replace(
+ newBlockContentPos,
+ newBlockContentPos + 1,
+ newBlockContent.content.size > 0
+ ? new Slice(Fragment.from(newBlockContent), depth + 2, depth + 2)
+ : undefined,
+ )
+
+ // Changes the type of the content node. The range doesn't matter as long as both from and to positions are
+ // within the content node.
+ if (keepType) {
+ state.tr.setBlockType(
+ newBlockContentPos,
+ newBlockContentPos,
+ state.schema.node(contentType).type,
+ contentNode.attrs,
+ )
+ }
+
+ // Sets the selection to the start of the new block's content node.
+ state.tr.setSelection(new TextSelection(state.doc.resolve(newBlockContentPos)))
+
+ // Replaces the content of the original block's content node. Doesn't replace the whole content node so its
+ // type doesn't change.
+ state.tr.replace(
+ startPos + 1,
+ endPos - 1,
+ originalBlockContent.content.size > 0
+ ? new Slice(Fragment.from(originalBlockContent), depth + 2, depth + 2)
+ : undefined,
+ )
+ }
+
+ return true
+ },
+ // Splits a block at a given position. Content after the position is moved to a new block below, at the same
+ // nesting level.
+ BNSplitHeadingBlock:
+ (posInBlock) =>
+ ({ state, dispatch }) => {
+ const blockInfo = getBlockInfoFromPos(state.doc, posInBlock)
+ if (blockInfo === undefined) {
+ return false
+ }
+ let { node, startPos, contentNode, depth } = blockInfo
+ if (node.childCount === 1) {
+ setTimeout(() => {
+ this.editor
+ .chain()
+ .deleteSelection()
+ .BNSplitBlock(state.selection.from, false)
+ .sinkListItem('blockContainer')
+ .UpdateGroup(-1, blockInfo.node.attrs.listType, true)
+ .run()
+ })
greptile
logic: Using setTimeout with editor commands can cause race conditions and inconsistent state. Consider using a synchronous approach or proper state management.
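One hedged alternative, reusing the same chained commands from the diff above: run the chain from the command props instead of deferring it with setTimeout, so it acts on the state it was handed rather than whatever the editor holds a tick later. Whether the chain may run inside this dispatch cycle depends on the surrounding implementation, so treat this as a sketch rather than a drop-in replacement.

// Sketch only: the same commands as above, without the setTimeout indirection.
BNSplitHeadingBlock:
  (posInBlock) =>
  ({ state, chain }) => {
    const blockInfo = getBlockInfoFromPos(state.doc, posInBlock)
    if (blockInfo === undefined) {
      return false
    }

    if (blockInfo.node.childCount === 1) {
      return chain()
        .deleteSelection()
        .BNSplitBlock(state.selection.from, false)
        .sinkListItem('blockContainer')
        .UpdateGroup(-1, blockInfo.node.attrs.listType, true)
        .run()
    }
    return false
  },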
diff block
+import { combineTransactionSteps, Extension, findChildrenInRange, getChangedRanges } from '@tiptap/core'
+import { nanoid } from 'nanoid'
+import { Fragment, Slice } from 'prosemirror-model'
+import { Plugin, PluginKey } from 'prosemirror-state'
+
+function createId() {
+ let id = nanoid(8)
+ return id
+}
+
+/**
+ * Code from Tiptap UniqueID extension (https://tiptap.dev/api/extensions/unique-id)
+ * This extension is licensed under MIT (even though it's part of Tiptap pro).
+ *
+ * If you're a user of BlockNote, we still recommend to support their awesome work and become a sponsor!
+ * https://tiptap.dev/pro
+ */
+
+/**
+ * Removes duplicated values within an array.
+ * Supports numbers, strings and objects.
+ */
+function removeDuplicates(array: any, by = JSON.stringify) {
+ const seen: any = {}
+ return array.filter((item: any) => {
+ const key = by(item)
+ return Object.prototype.hasOwnProperty.call(seen, key) ? false : (seen[key] = true)
+ })
+}
+
+/**
+ * Returns a list of duplicated items within an array.
+ */
+function findDuplicates(items: any) {
+ const filtered = items.filter((el: any, index: number) => items.indexOf(el) !== index)
+ const duplicates = removeDuplicates(filtered)
+ return duplicates
+}
+
+const UniqueID = Extension.create({
+ name: 'uniqueID',
+ // we’ll set a very high priority to make sure this runs first
+ // and is compatible with `appendTransaction` hooks of other extensions
+ priority: 10000,
+ addOptions() {
+ return {
+ attributeName: 'id',
+ types: [],
+ generateID: () => {
+ // Use mock ID if tests are running.
+ if (typeof window !== 'undefined' && (window as any).__TEST_OPTIONS) {
+ const testOptions = (window as any).__TEST_OPTIONS
+ if (testOptions.mockID === undefined) {
+ testOptions.mockID = 0
+ } else {
+ testOptions.mockID++
+ }
+
+ return testOptions.mockID.toString() as string
+ }
+
+ return createId()
+ },
+ filterTransaction: null,
+ }
+ },
+ addGlobalAttributes() {
+ return [
+ {
+ types: this.options.types,
+ attributes: {
+ [this.options.attributeName]: {
+ default: null,
+ parseHTML: (element) => element.getAttribute(`data-${this.options.attributeName}`),
+ renderHTML: (attributes) => ({
+ [`data-${this.options.attributeName}`]: attributes[this.options.attributeName],
+ }),
+ },
+ },
+ },
+ ]
+ },
+ // check initial content for missing ids
+ // onCreate() {
+ // // Don’t do this when the collaboration extension is active
+ // // because this may update the content, so Y.js tries to merge these changes.
+ // // This leads to empty block nodes.
+ // // See: https://github.com/ueberdosis/tiptap/issues/2400
+ // if (
+ // this.editor.extensionManager.extensions.find(
+ // (extension) => extension.name === "collaboration"
+ // )
+ // ) {
+ // return;
+ // }
+ // const { view, state } = this.editor;
+ // const { tr, doc } = state;
+ // const { types, attributeName, generateID } = this.options;
+ // const nodesWithoutId = findChildren(doc, (node) => {
+ // return (
+ // types.includes(node.type.name) && node.attrs[attributeName] === null
+ // );
+ // });
+ // nodesWithoutId.forEach(({ node, pos }) => {
+ // tr.setNodeMarkup(pos, undefined, {
+ // ...node.attrs,
+ // [attributeName]: generateID(),
+ // });
+ // });
+ // tr.setMeta("addToHistory", false);
+ // view.dispatch(tr);
+ // },
+ addProseMirrorPlugins() {
+ let dragSourceElement: any = null
+ let transformPasted = false
greptile
style: Global mutable state variables could cause race conditions in concurrent operations. Consider moving these into the plugin instance scope.
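A hedged sketch of the suggested scoping, using hypothetical names (uniqueIDStateKey and a dragstart handler); the real extension tracks more than this. The idea is to hold the flags in the plugin's own state, updated through meta transactions, rather than in variables shared by every handler the extension registers.

// Sketch only: plugin-scoped state instead of closure-level mutable variables.
const uniqueIDStateKey = new PluginKey('uniqueIDState')

const uniqueIDStatePlugin = new Plugin({
  key: uniqueIDStateKey,
  state: {
    init: () => ({ dragSourceElement: null as Element | null, transformPasted: false }),
    apply: (tr, value) => tr.getMeta(uniqueIDStateKey) ?? value,
  },
  props: {
    handleDOMEvents: {
      dragstart(view, event) {
        // Record the drag source through a meta transaction rather than a shared variable.
        const prev = uniqueIDStateKey.getState(view.state)
        view.dispatch(view.state.tr.setMeta(uniqueIDStateKey, { ...prev, dragSourceElement: event.target as Element }))
        return false
      },
    },
  },
})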
diff block
+import { Box, Menu, Text } from '@mantine/core'
+import { createStyles } from '@mantine/styles'
+import { useEffect, useRef } from 'react'
+
+const MIN_LEFT_MARGIN = 5
+
+export type LinkMenuItemProps = {
+ name: string
+ icon?: JSX.Element
+ hint?: string
+ disabled: boolean
+ isSelected: boolean
+ set: () => void
+}
+
+export function LinkMenuItem(props: LinkMenuItemProps) {
+ const itemRef = useRef<HTMLButtonElement>(null)
+ const { classes } = createStyles({ root: {} })(undefined, {
+ name: 'SuggestionListItem',
+ })
+
+ function isSelected() {
+ const isKeyboardSelected = props.isSelected
+ // props.selectedIndex !== undefined && props.selectedIndex === props.index;
+ const isMouseSelected = itemRef.current?.matches(':hover')
+
+ return isKeyboardSelected || isMouseSelected
+ }
+
+ // Updates HTML "data-hovered" attribute which Mantine uses to set mouse hover styles.
+ // Allows users to "hover" menu items when navigating using the keyboard.
+ function updateSelection() {
+ isSelected()
+ ? itemRef.current?.setAttribute('data-hovered', 'true')
+ : itemRef.current?.removeAttribute('data-hovered')
+ }
+
+ useEffect(() => {
+ // Updates whether the item is selected with the keyboard (triggered on selectedIndex prop change).
+ updateSelection()
+
+ // if (
+ // isSelected() &&
+ // itemRef.current &&
+ // itemRef.current.getBoundingClientRect().left > MIN_LEFT_MARGIN //TODO: Kinda hacky, fix
+ // // This check is needed because initially the menu is initialized somewhere above outside the screen (with left = 1)
+ // // scrollIntoView() is called before the menu is set in the right place, and without the check would scroll to the top of the page every time
+ // ) {
+ // itemRef.current.scrollIntoView({
+ // behavior: 'smooth',
+ // block: 'nearest',
+ // })
+ // }
+ })
+
+ return (
+ <Menu.Item
+ className={classes.root}
+ icon={props.icon}
+ onClick={props.set}
+ disabled={props.disabled}
+ closeMenuOnClick={false}
+ // Ensures an item selected with both mouse & keyboard doesn't get deselected on mouse leave.
+ onMouseLeave={() => {
+ setTimeout(() => {
+ updateSelection()
+ }, 1)
+ }}
greptile
style: A 1ms timeout is too short and may cause race conditions. Consider increasing it to at least 10-20ms.
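A hedged sketch of the suggestion, reusing updateSelection from the component above: either lengthen the delay as the comment proposes, or defer to the next animation frame so the hover state has settled before it is re-read.

// Sketch only: replaces the 1ms delay in onMouseLeave.
onMouseLeave={() => {
  // Wait for the next frame so the :hover state has actually cleared before re-reading it.
  requestAnimationFrame(() => updateSelection())
  // Or, keeping a timeout as suggested: setTimeout(() => updateSelection(), 20)
}}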
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple processes call this simultaneously; changedFiles is a global variable being modified asynchronously.
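A hedged sketch of one way to close that window, assuming listChangedFiles returns a promise as it is awaited above: cache the in-flight promise rather than the resolved array, so concurrent callers share a single computation instead of racing to assign the global.

// Sketch only: memoize the promise, not the result.
let changedFilesPromise = null;

function getChangedFilesOnce() {
  if (changedFilesPromise === null) {
    // Every concurrent caller awaits the same promise, so listChangedFiles runs exactly once.
    changedFilesPromise = listChangedFiles().then(files => [...files]);
  }
  return changedFilesPromise;
}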
diff block
const CLIEngine = require('eslint').CLIEngine;
const listChangedFiles = require('../shared/listChangedFiles');
-const allPaths = ['**/*.js'];
+const allPaths = ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'];
let changedFiles = null;
+let eslintCache = new Map();
-function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options) {
- const cli = new CLIEngine(options);
- const formatter = cli.getFormatter();
+async function runESLintOnFilesWithOptions(filePatterns, onlyChanged, options = {}) {
+ const defaultOptions = {
+ cache: true,
+ cacheLocation: '.eslintcache',
+ fix: false,
+ maxWarnings: 100,
+ ...options
+ };
+
+ const cli = new CLIEngine(defaultOptions);
+ const formatter = cli.getFormatter('stylish');
if (onlyChanged && changedFiles === null) {
- // Calculate lazily.
- changedFiles = [...listChangedFiles()];
+ try {
+ changedFiles = [...await listChangedFiles()];
+ changedFiles.forEach(file => {
+ if (!eslintCache.has(file)) {
+ eslintCache.set(file, null);
+ }
+ });
+ } catch (error) {
+ console.error('Error getting changed files:', error);
+ throw error;
+ }
}
greptile
logic: Race condition possible if multiple calls occur before changedFiles is populated. Consider adding a lock or promise queue.
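A hedged sketch of the lock idea from the comment, kept deliberately small: callers queue on a shared promise chain, so only the first one populates changedFiles and later callers reuse the cached value.

// Sketch only: a minimal promise queue guarding the lazy initialization.
let changedFiles = null;
let queue = Promise.resolve();

function withQueue(task) {
  // Each caller runs after the previous one; a rejection doesn't poison the chain.
  const result = queue.then(task);
  queue = result.catch(() => {});
  return result;
}

function getChangedFiles() {
  return withQueue(async () => {
    if (changedFiles === null) {
      changedFiles = [...await listChangedFiles()];
    }
    return changedFiles;
  });
}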