AI翻訳の誤り

ログに多数のこのようなエラーが記録されていました:

メッセージ(2 件のコピーが報告されました)

DiscourseAi::Translation: 投稿 120 の da への翻訳に失敗しました:バリデーションエラー - Raw は空にできません、Cooked も空にできません

/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/activerecord-8.0.5/lib/active_record/validations.rb:87:in 'ActiveRecord::Validations#raise_validation_error'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/activerecord-8.0.5/lib/active_record/validations.rb:54:in 'ActiveRecord::Validations#save!'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/activerecord-8.0.5/lib/active_record/transactions.rb:365:in 'block in ActiveRecord::Transactions#save!'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/activerecord-8.0.5/lib/active_record/transactions.rb:417:in 'block (2 levels) in ActiveRecord::Transactions#with_transaction_returning_status'

バックトレース

/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/activesupport-8.0.5/lib/active_support/broadcast_logger.rb:218:in 'block in ActiveSupport::BroadcastLogger#dispatch'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/activesupport-8.0.5/lib/active_support/broadcast_logger.rb:217:in 'Array#map'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/activesupport-8.0.5/lib/active_support/broadcast_logger.rb:217:in 'ActiveSupport::BroadcastLogger#dispatch'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/activesupport-8.0.5/lib/active_support/broadcast_logger.rb:129:in 'ActiveSupport::BroadcastLogger#warn'
/var/www/discourse/plugins/discourse-ai/lib/translation/verbose_logger.rb:8:in 'DiscourseAi::Translation::VerboseLogger.log'
/var/www/discourse/plugins/discourse-ai/app/jobs/regular/localize_posts.rb:48:in 'block in Jobs::LocalizePosts#execute'
/var/www/discourse/plugins/discourse-ai/app/jobs/regular/localize_posts.rb:29:in 'Array#each'
/var/www/discourse/plugins/discourse-ai/app/jobs/regular/localize_posts.rb:29:in 'Jobs::LocalizePosts#execute'
/var/www/discourse/app/jobs/base.rb:318:in 'block (2 levels) in Jobs::Base#perform'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/rails_multisite-7.0.0/lib/rails_multisite/connection_management/null_instance.rb:49:in 'RailsMultisite::ConnectionManagement::NullInstance#with_connection'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/rails_multisite-7.0.0/lib/rails_multisite/connection_management.rb:17:in 'RailsMultisite::ConnectionManagement.with_connection'
/var/www/discourse/app/jobs/base.rb:305:in 'block in Jobs::Base#perform'
/var/www/discourse/app/jobs/base.rb:301:in 'Array#each'
/var/www/discourse/app/jobs/base.rb:301:in 'Jobs::Base#perform'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:220:in 'Sidekiq::Processor#execute_job'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:185:in 'block (4 levels) in Sidekiq::Processor#process'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/middleware/chain.rb:180:in 'Sidekiq::Middleware::Chain#traverse'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/middleware/chain.rb:183:in 'block in Sidekiq::Middleware::Chain#traverse'
/var/www/discourse/lib/sidekiq/suppress_user_email_errors.rb:6:in 'Sidekiq::SuppressUserEmailErrors#call'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/middleware/chain.rb:182:in 'Sidekiq::Middleware::Chain#traverse'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/middleware/chain.rb:183:in 'block in Sidekiq::Middleware::Chain#traverse'
/var/www/discourse/lib/sidekiq/discourse_event.rb:6:in 'Sidekiq::DiscourseEvent#call'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/middleware/chain.rb:182:in 'Sidekiq::Middleware::Chain#traverse'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/middleware/chain.rb:183:in 'block in Sidekiq::Middleware::Chain#traverse'
/var/www/discourse/lib/sidekiq/pausable.rb:131:in 'Sidekiq::Pausable#call'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/middleware/chain.rb:182:in 'Sidekiq::Middleware::Chain#traverse'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/middleware/chain.rb:183:in 'block in Sidekiq::Middleware::Chain#traverse'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/job/interrupt_handler.rb:9:in 'Sidekiq::Job::InterruptHandler#call'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/middleware/chain.rb:182:in 'Sidekiq::Middleware::Chain#traverse'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/middleware/chain.rb:183:in 'block in Sidekiq::Middleware::Chain#traverse'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/metrics/tracking.rb:26:in 'Sidekiq::Metrics::ExecutionTracker#track'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/metrics/tracking.rb:134:in 'Sidekiq::Metrics::Middleware#call'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/middleware/chain.rb:182:in 'Sidekiq::Middleware::Chain#traverse'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/middleware/chain.rb:173:in 'Sidekiq::Middleware::Chain#invoke'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:184:in 'block (3 levels) in Sidekiq::Processor#process'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:145:in 'block (6 levels) in Sidekiq::Processor#dispatch'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/job_retry.rb:118:in 'Sidekiq::JobRetry#local'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:144:in 'block (5 levels) in Sidekiq::Processor#dispatch'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/config.rb:39:in 'block in <class:Config>'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:139:in 'block (4 levels) in Sidekiq::Processor#dispatch'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:281:in 'Sidekiq::Processor#stats'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:134:in 'block (3 levels) in Sidekiq::Processor#dispatch'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/job_logger.rb:15:in 'Sidekiq::JobLogger#call'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:133:in 'block (2 levels) in Sidekiq::Processor#dispatch'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/job_retry.rb:85:in 'Sidekiq::JobRetry#global'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:132:in 'block in Sidekiq::Processor#dispatch'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/job_logger.rb:40:in 'Sidekiq::JobLogger#prepare'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:131:in 'Sidekiq::Processor#dispatch'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:183:in 'block (2 levels) in Sidekiq::Processor#process'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:182:in 'Thread.handle_interrupt'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:182:in 'block in Sidekiq::Processor#process'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:181:in 'Thread.handle_interrupt'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:181:in 'Sidekiq::Processor#process'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:86:in 'Sidekiq::Processor#process_one'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/processor.rb:76:in 'Sidekiq::Processor#run'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/component.rb:10:in 'Sidekiq::Component#watchdog'
/var/www/discourse/vendor/bundle/ruby/3.4.0/gems/sidekiq-7.3.10/lib/sidekiq/component.rb:19:in 'block in Sidekiq::Component#safe_thread'

次に、投稿 120 の内容を確認してください(/p/120 に移動するだけで確認できます)。その後、その投稿を翻訳しようとした際の LLM の応答を確認してください(ai_api_audit_logs テーブルを確認します)。

「いいね!」 1

このクエリで監査ログを確認すると

SELECT 
  feature_name,
  created_at,
  post_id,
  response_tokens
FROM 
  ai_api_audit_logs
WHERE 
  post_id IS NOT NULL
  AND created_at > :start_date
  AND (response_tokens IS NULL OR response_tokens != 1)
ORDER BY 
  created_at DESC

ログに表示されている日時付近のその投稿に関する結果は何も返ってきません。
他に確認すべきことはありますか?

はい、WHERE から response_tokens 節全体を削除してください。

はい、まずこちらのタイプミスがありました。/t/120 ではなく /p/120 を使うべきでしたが、/t/120 を使っていました。

/p/120 は https://discussion.mcebuddy2x.com/t/63/1 を指します。

その後、テーブルに対して更新されたクエリを実行しましたが、ログのタイムスタンプ(木曜 午後 11:25)に一致するエントリは見つかりませんでした。

タイムスタンプに最も近い以下のエントリのみがテーブルに存在します。

feature_name created_at post response_tokens
translation 2026-05-07T23:26:15.471Z https://discussion.mcebuddy2x.com/t/5897/1 2423
translation 2026-05-07T23:11:23.504Z https://discussion.mcebuddy2x.com/t/5896/2 425

上記のどの投稿 ID も、エラーログにある投稿 ID と一致しません。

その投稿 ID への参照がログに現れるのは、ずっと後の時間です。

feature_name created_at post response_tokens
translation 2026-05-08T03:10:18.214Z https://discussion.mcebuddy2x.com/t/63/1 0

レスポンストークンはデバッグには役に立ちません。生レスポンスを表示する必要があります。

要するに、ログと同じタイムスタンプと投稿IDを持つレスポンスが一切見当たりません。

その投稿IDに関する次のデータダンプは約1時間後に現れ、以下のような内容が含まれています:

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" given","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":".\n\n","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":"Probably","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" keep","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" truncated","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":".\n\n","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":"Now","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" produce","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" final","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" JSON","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" with","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" \"","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":"output","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":"\":","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" translation","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" string","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":".\n\n","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":"Make","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" sure","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" to","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" preserve","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" line","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" breaks","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" exactly","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" as","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" original","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":".\n\n","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":"Let's","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" construct","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" final","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":" translation","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

data: {"id":"chatcmpl-35a85986-7a6b-4169-893a-b43bcc22b1f2","object":"chat.completion.chunk","created":1778209824,"model":"openai/gpt-oss-120b","system_fingerprint":"fp_d66938db44","choices":[{"index":0,"delta":{"reasoning":".\n\n","channel":"analysis"},"logprobs":null,"finish_reason":null}]}

event: error
data: {"error":{"message":"Failed to generate JSON. Please adjust your prompt. See 'failed_generation' for more details.","type":"invalid_request_error","code":"json_validate_failed","failed_generation":"max completion tokens reached before generating a valid document","status_code":400}}

推論(reasoning)トークンが、max_tokens で指定された完了トークンの上限をすべて消費してしまっています。これが翻訳に推論機能を使用すべきでないもう一つの理由です。

これに対処するためのサイト設定がありましたが、

その後、以下のプルリクエストで削除されました。