/*
THIS IS A GENERATED/BUNDLED FILE BY ESBUILD
if you want to view the source, please visit the github repository of this plugin
*/
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __commonJS = (cb, mod) => function __require() {
  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// node_modules/sse/lib/sse.js
var require_sse = __commonJS({
  "node_modules/sse/lib/sse.js"(exports) {
    var SSE2 = function(url, options) {
      if (!(this instanceof SSE2)) {
        return new SSE2(url, options);
      }
      this.INITIALIZING = -1;
      this.CONNECTING = 0;
      this.OPEN = 1;
      this.CLOSED = 2;
      this.url = url;
      options = options || {};
      this.headers = options.headers || {};
      this.payload = options.payload !== void 0 ? options.payload : "";
      this.method = options.method || (this.payload && "POST" || "GET");
      this.withCredentials = !!options.withCredentials;
      this.FIELD_SEPARATOR = ":";
      this.listeners = {};
      this.xhr = null;
      this.readyState = this.INITIALIZING;
      this.progress = 0;
      this.chunk = "";
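      // Minimal EventTarget-style listener API (add/remove/dispatch) for this XHR-based SSE client.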
      this.addEventListener = function(type, listener) {
        if (this.listeners[type] === void 0) {
          this.listeners[type] = [];
        }
        if (this.listeners[type].indexOf(listener) === -1) {
          this.listeners[type].push(listener);
        }
      };
      this.removeEventListener = function(type, listener) {
        if (this.listeners[type] === void 0) {
          return;
        }
        var filtered = [];
        this.listeners[type].forEach(function(element) {
          if (element !== listener) {
            filtered.push(element);
          }
        });
        if (filtered.length === 0) {
          delete this.listeners[type];
        } else {
          this.listeners[type] = filtered;
        }
      };
      this.dispatchEvent = function(e) {
        if (!e) {
          return true;
        }
        e.source = this;
        var onHandler = "on" + e.type;
        if (this.hasOwnProperty(onHandler)) {
          this[onHandler].call(this, e);
          if (e.defaultPrevented) {
            return false;
          }
        }
        if (this.listeners[e.type]) {
          return this.listeners[e.type].every(function(callback) {
            callback(e);
            return !e.defaultPrevented;
          });
        }
        return true;
      };
      this._setReadyState = function(state) {
        var event = new CustomEvent("readystatechange");
        event.readyState = state;
        this.readyState = state;
        this.dispatchEvent(event);
      };
      this._onStreamFailure = function(e) {
        var event = new CustomEvent("error");
        event.data = e.currentTarget.response;
        this.dispatchEvent(event);
        this.close();
      };
      this._onStreamAbort = function(e) {
        this.dispatchEvent(new CustomEvent("abort"));
        this.close();
      };
      this._onStreamProgress = function(e) {
        if (!this.xhr) {
          return;
        }
        if (this.xhr.status !== 200) {
          this._onStreamFailure(e);
          return;
        }
        if (this.readyState == this.CONNECTING) {
          this.dispatchEvent(new CustomEvent("open"));
          this._setReadyState(this.OPEN);
        }
        var data = this.xhr.responseText.substring(this.progress);
        this.progress += data.length;
        data.split(/(\r\n|\r|\n){2}/g).forEach(function(part) {
          if (part.trim().length === 0) {
            this.dispatchEvent(this._parseEventChunk(this.chunk.trim()));
            this.chunk = "";
          } else {
            this.chunk += part;
          }
        }.bind(this));
      };
      this._onStreamLoaded = function(e) {
        this._onStreamProgress(e);
        this.dispatchEvent(this._parseEventChunk(this.chunk));
        this.chunk = "";
      };
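      // Parse one raw SSE chunk ("id:", "event:", "data:", "retry:" lines) into a CustomEvent.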
      this._parseEventChunk = function(chunk) {
        if (!chunk || chunk.length === 0) {
          return null;
        }
        var e = { "id": null, "retry": null, "data": "", "event": "message" };
        chunk.split(/\n|\r\n|\r/).forEach(function(line) {
          line = line.trimRight();
          var index = line.indexOf(this.FIELD_SEPARATOR);
          if (index <= 0) {
            return;
          }
          var field = line.substring(0, index);
          if (!(field in e)) {
            return;
          }
          var value = line.substring(index + 1).trimLeft();
          if (field === "data") {
            e[field] += value;
          } else {
            e[field] = value;
          }
        }.bind(this));
        var event = new CustomEvent(e.event);
        event.data = e.data;
        event.id = e.id;
        return event;
      };
      this._checkStreamClosed = function() {
        if (!this.xhr) {
          return;
        }
        if (this.xhr.readyState === XMLHttpRequest.DONE) {
          this._setReadyState(this.CLOSED);
        }
      };
      this.stream = function() {
        this._setReadyState(this.CONNECTING);
        this.xhr = new XMLHttpRequest();
        this.xhr.addEventListener("progress", this._onStreamProgress.bind(this));
        this.xhr.addEventListener("load", this._onStreamLoaded.bind(this));
        this.xhr.addEventListener("readystatechange", this._checkStreamClosed.bind(this));
        this.xhr.addEventListener("error", this._onStreamFailure.bind(this));
        this.xhr.addEventListener("abort", this._onStreamAbort.bind(this));
        this.xhr.open(this.method, this.url);
        for (var header in this.headers) {
          this.xhr.setRequestHeader(header, this.headers[header]);
        }
        this.xhr.withCredentials = this.withCredentials;
        this.xhr.send(this.payload);
      };
      this.close = function() {
        if (this.readyState === this.CLOSED) {
          return;
        }
        this.xhr.abort();
        this.xhr = null;
        this._setReadyState(this.CLOSED);
      };
    };
    if (typeof exports !== "undefined") {
      exports.SSE = SSE2;
    }
  }
});
// main.ts
var main_exports = {};
__export(main_exports, {
  ChatTemplates: () => ChatTemplates,
  default: () => ChatGPT_MD
});
module.exports = __toCommonJS(main_exports);
var import_obsidian3 = require("obsidian");

// stream.ts
var import_obsidian2 = require("obsidian");
var import_sse = __toESM(require_sse());

// helpers.ts
var import_obsidian = require("obsidian");
var unfinishedCodeBlock = (txt) => {
  const matcher = txt.match(/```/g);
  if (!matcher) {
    return false;
  }
  if (matcher.length % 2 !== 0)
    console.log("[ChatGPT MD] unclosed code block detected");
  return matcher.length % 2 !== 0;
};
var writeInferredTitleToEditor = async (vault, view, fileManager, chatFolder, title) => {
  try {
    const file = view.file;
    const folder = chatFolder.replace(/\/$/, "");
    let newFileName = `${folder}/${title}.md`;
    let i = 1;
    while (await vault.adapter.exists(newFileName)) {
      newFileName = `${folder}/${title} (${i}).md`;
      i++;
    }
    fileManager.renameFile(file, newFileName);
  } catch (err) {
    new import_obsidian.Notice("[ChatGPT MD] Error writing inferred title to editor");
    console.log("[ChatGPT MD] Error writing inferred title to editor", err);
    throw err;
  }
};
var createFolderModal = async (app2, vault, folderName, folderPath) => {
  const folderCreationModal = new FolderCreationModal(
    app2,
    folderName,
    folderPath
  );
  folderCreationModal.open();
  const result = await folderCreationModal.waitForModalValue();
  if (result) {
    console.log("[ChatGPT MD] Creating folder");
    await vault.createFolder(folderPath);
  } else {
    console.log("[ChatGPT MD] Not creating folder");
  }
  return result;
};
var FolderCreationModal = class extends import_obsidian.Modal {
  constructor(app2, folderName, folderPath) {
    super(app2);
    this.folderName = folderName;
    this.folderPath = folderPath;
    this.result = false;
    this.modalPromise = new Promise((resolve) => {
      this.resolveModalPromise = resolve;
    });
  }
  onOpen() {
    const { contentEl } = this;
    contentEl.createEl("h2", {
      text: `[ChatGPT MD] No ${this.folderName} folder found.`
    });
    contentEl.createEl("p", {
      text: `If you choose "Yes, Create", the plugin will automatically create a folder at: ${this.folderPath}. You can change this path in the plugin settings.`
    });
    new import_obsidian.Setting(contentEl).addButton(
      (btn) => btn.setButtonText("Yes, Create Folder").setTooltip("Create folder").setCta().onClick(() => {
        this.result = true;
        this.resolveModalPromise(this.result);
        this.close();
      })
    );
    new import_obsidian.Setting(contentEl).addButton(
      (btn) => btn.setButtonText("No, I'll create it myself").setTooltip("Cancel").setCta().onClick(() => {
        this.result = false;
        this.resolveModalPromise(this.result);
        this.close();
      })
    );
  }
  waitForModalValue() {
    return this.modalPromise;
  }
  onClose() {
    const { contentEl } = this;
    contentEl.empty();
  }
};
// stream.ts
var StreamManager = class {
  constructor() {
    this.sse = null;
    this.manualClose = false;
    this.stopStreaming = () => {
      if (import_obsidian2.Platform.isMobile) {
        new import_obsidian2.Notice("[ChatGPT MD] Mobile not supported.");
        return;
      }
      if (this.sse) {
        this.manualClose = true;
        this.sse.close();
        console.log("[ChatGPT MD] SSE manually closed");
        this.sse = null;
      }
    };
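    // Stream the chat completion over SSE and insert tokens into the editor as they arrive.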
    this.streamSSE = async (editor, apiKey, url, options, setAtCursor, headingPrefix) => {
      return new Promise((resolve, reject) => {
        try {
          console.log("[ChatGPT MD] streamSSE", options);
          const source = new import_sse.SSE(url, {
            headers: {
              "Content-Type": "application/json",
              Authorization: `Bearer ${apiKey}`
            },
            method: "POST",
            payload: JSON.stringify(options)
          });
          this.sse = source;
          let txt = "";
          let initialCursorPosCh = editor.getCursor().ch;
          let initialCursorPosLine = editor.getCursor().line;
          source.addEventListener("open", (e) => {
            console.log("[ChatGPT MD] SSE Opened");
            const newLine = `
<hr class="__chatgpt_plugin">
${headingPrefix}role::assistant
`;
            editor.replaceRange(newLine, editor.getCursor());
            const cursor = editor.getCursor();
            const newCursor = {
              line: cursor.line,
              ch: cursor.ch + newLine.length
            };
            editor.setCursor(newCursor);
            initialCursorPosCh = newCursor.ch;
            initialCursorPosLine = newCursor.line;
          });
          source.addEventListener("message", (e) => {
            if (e.data != "[DONE]") {
              const payload = JSON.parse(e.data);
              const text = payload.choices[0].delta.content;
              if (!text) {
                return;
              }
              const cursor = editor.getCursor();
              const convPos = editor.posToOffset(cursor);
              const cm6 = editor.cm;
              const transaction = cm6.state.update({
                changes: {
                  from: convPos,
                  to: convPos,
                  insert: text
                }
              });
              cm6.dispatch(transaction);
              txt += text;
              const newCursor = {
                line: cursor.line,
                ch: cursor.ch + text.length
              };
              editor.setCursor(newCursor);
            } else {
              source.close();
              console.log("[ChatGPT MD] SSE Closed");
              if (unfinishedCodeBlock(txt)) {
                txt += "\n```";
              }
              const cursor = editor.getCursor();
              editor.replaceRange(
                txt,
                {
                  line: initialCursorPosLine,
                  ch: initialCursorPosCh
                },
                cursor
              );
              const newCursor = {
                line: initialCursorPosLine,
                ch: initialCursorPosCh + txt.length
              };
              editor.setCursor(newCursor);
              if (!setAtCursor) {
                editor.replaceRange("", newCursor, {
                  line: Infinity,
                  ch: Infinity
                });
              } else {
                new import_obsidian2.Notice(
                  "[ChatGPT MD] Text pasted at cursor may leave artifacts. Please remove them manually. ChatGPT MD cannot safely remove text when pasting at cursor."
                );
              }
              resolve(txt);
            }
          });
          source.addEventListener("abort", (e) => {
            console.log("[ChatGPT MD] SSE Closed Event");
            if (this.manualClose) {
              resolve(txt);
            }
          });
          source.addEventListener("error", (e) => {
            try {
              console.log(
                "[ChatGPT MD] SSE Error: ",
                JSON.parse(e.data)
              );
              source.close();
              console.log("[ChatGPT MD] SSE Closed");
              reject(JSON.parse(e.data));
            } catch (err) {
              console.log("[ChatGPT MD] Unknown Error: ", e);
              source.close();
              console.log("[ChatGPT MD] SSE Closed");
              reject(e);
            }
          });
          source.stream();
        } catch (err) {
          console.log("SSE Error", err);
          reject(err);
        }
      });
    };
  }
};
// main.ts
var DEFAULT_SETTINGS = {
  apiKey: "default",
  defaultChatFrontmatter: "---\nsystem_commands: ['I am a helpful assistant.']\ntemperature: 0\ntop_p: 1\nmax_tokens: 512\npresence_penalty: 1\nfrequency_penalty: 1\nstream: true\nstop: null\nn: 1\nmodel: gpt-3.5-turbo\n---",
  stream: true,
  chatTemplateFolder: "ChatGPT_MD/templates",
  chatFolder: "ChatGPT_MD/chats",
  generateAtCursor: false,
  autoInferTitle: false,
  dateFormat: "YYYYMMDDhhmmss",
  headingLevel: 0
};
var DEFAULT_URL = `https://api.openai.com/v1/chat/completions`;
var ChatGPT_MD = class extends import_obsidian3.Plugin {
  async callOpenAIAPI(streamManager, editor, messages, model = "gpt-3.5-turbo", max_tokens = 250, temperature = 0.3, top_p = 1, presence_penalty = 0.5, frequency_penalty = 0.5, stream = true, stop = null, n = 1, logit_bias = null, user = null, url = DEFAULT_URL) {
    try {
      console.log("calling openai api");
      if (stream) {
        const options = {
          model,
          messages,
          max_tokens,
          temperature,
          top_p,
          presence_penalty,
          frequency_penalty,
          stream,
          stop,
          n
          // logit_bias: logit_bias, // not yet supported
          // user: user, // not yet supported
        };
        const response = await streamManager.streamSSE(
          editor,
          this.settings.apiKey,
          url,
          options,
          this.settings.generateAtCursor,
          this.getHeadingPrefix()
        );
        console.log("response from stream", response);
        return { fullstr: response, mode: "streaming" };
      } else {
        const responseUrl = await (0, import_obsidian3.requestUrl)({
          url,
          method: "POST",
          headers: {
            Authorization: `Bearer ${this.settings.apiKey}`,
            "Content-Type": "application/json"
          },
          contentType: "application/json",
          body: JSON.stringify({
            model,
            messages,
            max_tokens,
            temperature,
            top_p,
            presence_penalty,
            frequency_penalty,
            stream,
            stop,
            n
            // logit_bias: logit_bias, // not yet supported
            // user: user, // not yet supported
          }),
          throw: false
        });
        try {
          const json = responseUrl.json;
          if (json && json.error) {
            new import_obsidian3.Notice(
              `[ChatGPT MD] Stream = False Error :: ${json.error.message}`
            );
            throw new Error(JSON.stringify(json.error));
          }
        } catch (err) {
          if (err instanceof SyntaxError) {
            // response body was not valid JSON; fall through and parse the raw text below
          } else {
            throw new Error(err);
          }
        }
        const response = responseUrl.text;
        const responseJSON = JSON.parse(response);
        return responseJSON.choices[0].message.content;
      }
    } catch (err) {
      if (err instanceof Object) {
        if (err.error) {
          new import_obsidian3.Notice(`[ChatGPT MD] Error :: ${err.error.message}`);
          throw new Error(JSON.stringify(err.error));
        } else {
          if (url !== DEFAULT_URL) {
            new import_obsidian3.Notice(
              "[ChatGPT MD] Issue calling specified url: " + url
            );
            throw new Error(
              "[ChatGPT MD] Issue calling specified url: " + url
            );
          } else {
            new import_obsidian3.Notice(
              `[ChatGPT MD] Error :: ${JSON.stringify(err)}`
            );
            throw new Error(JSON.stringify(err));
          }
        }
      }
      new import_obsidian3.Notice(
        "issue calling OpenAI API, see console for more details"
      );
      throw new Error(
        "issue calling OpenAI API, see error for more details: " + err
      );
    }
  }
  addHR(editor, role) {
    const newLine = `
<hr class="__chatgpt_plugin">
${this.getHeadingPrefix()}role::${role}
`;
    editor.replaceRange(newLine, editor.getCursor());
    const cursor = editor.getCursor();
    const newCursor = {
      line: cursor.line,
      ch: cursor.ch + newLine.length
    };
    editor.setCursor(newCursor);
  }
  getFrontmatter(view) {
    var _a;
    try {
      const noteFile = app.workspace.getActiveFile();
      if (!noteFile) {
        throw new Error("no active file");
      }
      const metaMatter = (_a = app.metadataCache.getFileCache(noteFile)) == null ? void 0 : _a.frontmatter;
      const shouldStream = (metaMatter == null ? void 0 : metaMatter.stream) !== void 0 ? metaMatter.stream : this.settings.stream !== void 0 ? this.settings.stream : true;
      const temperature = (metaMatter == null ? void 0 : metaMatter.temperature) !== void 0 ? metaMatter.temperature : 0.3;
      const frontmatter = {
        title: (metaMatter == null ? void 0 : metaMatter.title) || view.file.basename,
        tags: (metaMatter == null ? void 0 : metaMatter.tags) || [],
        model: (metaMatter == null ? void 0 : metaMatter.model) || "gpt-3.5-turbo",
        temperature,
        top_p: (metaMatter == null ? void 0 : metaMatter.top_p) || 1,
        presence_penalty: (metaMatter == null ? void 0 : metaMatter.presence_penalty) || 0,
        frequency_penalty: (metaMatter == null ? void 0 : metaMatter.frequency_penalty) || 0,
        stream: shouldStream,
        max_tokens: (metaMatter == null ? void 0 : metaMatter.max_tokens) || 512,
        stop: (metaMatter == null ? void 0 : metaMatter.stop) || null,
        n: (metaMatter == null ? void 0 : metaMatter.n) || 1,
        logit_bias: (metaMatter == null ? void 0 : metaMatter.logit_bias) || null,
        user: (metaMatter == null ? void 0 : metaMatter.user) || null,
        system_commands: (metaMatter == null ? void 0 : metaMatter.system_commands) || null,
        url: (metaMatter == null ? void 0 : metaMatter.url) || DEFAULT_URL
      };
      return frontmatter;
    } catch (err) {
      throw new Error("Error getting frontmatter");
    }
  }
  splitMessages(text) {
    try {
      const messages = text.split('<hr class="__chatgpt_plugin">');
      return messages;
    } catch (err) {
      throw new Error("Error splitting messages" + err);
    }
  }
  moveCursorToEndOfFile(editor) {
    try {
      const length = editor.lastLine();
      const newCursor = {
        line: length + 1,
        ch: 0
      };
      editor.setCursor(newCursor);
      return newCursor;
    } catch (err) {
      throw new Error("Error moving cursor to end of file" + err);
    }
  }
  removeYMLFromMessage(message) {
    try {
      const YAMLFrontMatter = /---\s*[\s\S]*?\s*---/g;
      const newMessage = message.replace(YAMLFrontMatter, "");
      return newMessage;
    } catch (err) {
      throw new Error("Error removing YML from message" + err);
    }
  }
  extractRoleAndMessage(message) {
    try {
      if (message.includes("role::")) {
        const role = message.split("role::")[1].split("\n")[0].trim();
        const content = message.split("role::")[1].split("\n").slice(1).join("\n").trim();
        return { role, content };
      } else {
        return { role: "user", content: message };
      }
    } catch (err) {
      throw new Error("Error extracting role and message" + err);
    }
  }
  getHeadingPrefix() {
    const headingLevel = this.settings.headingLevel;
    if (headingLevel === 0) {
      return "";
    } else if (headingLevel > 6) {
      return "#".repeat(6) + " ";
    }
    return "#".repeat(headingLevel) + " ";
  }
  appendMessage(editor, role, message) {
    const newLine = `
<hr class="__chatgpt_plugin">
${this.getHeadingPrefix()}role::${role}
${message}
<hr class="__chatgpt_plugin">
${this.getHeadingPrefix()}role::user
`;
    editor.replaceRange(newLine, editor.getCursor());
  }
  async inferTitleFromMessages(messages) {
    console.log("[ChatGPT MD] Inferring Title");
    new import_obsidian3.Notice("[ChatGPT MD] Inferring title from messages...");
    try {
      if (messages.length < 2) {
        new import_obsidian3.Notice(
          "Not enough messages to infer title. Minimum 2 messages."
        );
        return;
      }
      const prompt = `Infer title from the summary of the content of these messages. The title **cannot** contain any of the following characters: colon, back slash or forward slash. Just return the title.
Messages:
${JSON.stringify(
        messages
      )}`;
      const titleMessage = [
        {
          role: "user",
          content: prompt
        }
      ];
      const responseUrl = await (0, import_obsidian3.requestUrl)({
        url: `https://api.openai.com/v1/chat/completions`,
        method: "POST",
        headers: {
          Authorization: `Bearer ${this.settings.apiKey}`,
          "Content-Type": "application/json"
        },
        contentType: "application/json",
        body: JSON.stringify({
          model: "gpt-3.5-turbo",
          messages: titleMessage,
          max_tokens: 50,
          temperature: 0
        }),
        throw: false
      });
      const response = responseUrl.text;
      const responseJSON = JSON.parse(response);
      return responseJSON.choices[0].message.content.replace(/[:/\\]/g, "").replace("Title", "").replace("title", "").trim();
    } catch (err) {
      new import_obsidian3.Notice("[ChatGPT MD] Error inferring title from messages");
      throw new Error(
        "[ChatGPT MD] Error inferring title from messages" + err
      );
    }
  }
  // only proceed to infer title if the title is in timestamp format
  isTitleTimestampFormat(title) {
    try {
      const format = this.settings.dateFormat;
      const pattern = this.generateDatePattern(format);
      return title.length == format.length && pattern.test(title);
    } catch (err) {
      throw new Error(
        "Error checking if title is in timestamp format" + err
      );
    }
  }
  generateDatePattern(format) {
    const pattern = format.replace(/[-/\\^$*+?.()|[\]{}]/g, "\\$&").replace("YYYY", "\\d{4}").replace("MM", "\\d{2}").replace("DD", "\\d{2}").replace("hh", "\\d{2}").replace("mm", "\\d{2}").replace("ss", "\\d{2}");
    return new RegExp(`^${pattern}$`);
  }
  // get date from format
  getDate(date, format = "YYYYMMDDhhmmss") {
    const year = date.getFullYear();
    const month = date.getMonth() + 1;
    const day = date.getDate();
    const hour = date.getHours();
    const minute = date.getMinutes();
    const second = date.getSeconds();
    const paddedMonth = month.toString().padStart(2, "0");
    const paddedDay = day.toString().padStart(2, "0");
    const paddedHour = hour.toString().padStart(2, "0");
    const paddedMinute = minute.toString().padStart(2, "0");
    const paddedSecond = second.toString().padStart(2, "0");
    return format.replace("YYYY", year.toString()).replace("MM", paddedMonth).replace("DD", paddedDay).replace("hh", paddedHour).replace("mm", paddedMinute).replace("ss", paddedSecond);
  }
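  // Register plugin commands, the settings tab, and the status bar item.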
  async onload() {
    const statusBarItemEl = this.addStatusBarItem();
    await this.loadSettings();
    const streamManager = new StreamManager();
    this.addCommand({
      id: "call-chatgpt-api",
      name: "Chat",
      icon: "message-circle",
      editorCallback: (editor, view) => {
        statusBarItemEl.setText("[ChatGPT MD] Calling API...");
        const frontmatter = this.getFrontmatter(view);
        const bodyWithoutYML = this.removeYMLFromMessage(
          editor.getValue()
        );
        const messages = this.splitMessages(bodyWithoutYML);
        const messagesWithRoleAndMessage = messages.map((message) => {
          return this.extractRoleAndMessage(message);
        });
        if (frontmatter.system_commands) {
          const systemCommands = frontmatter.system_commands;
          messagesWithRoleAndMessage.unshift(
            ...systemCommands.map((command) => {
              return {
                role: "system",
                content: command
              };
            })
          );
        }
        if (!this.settings.generateAtCursor) {
          this.moveCursorToEndOfFile(editor);
        }
        if (import_obsidian3.Platform.isMobile) {
          new import_obsidian3.Notice("[ChatGPT MD] Calling API");
        }
        this.callOpenAIAPI(
          streamManager,
          editor,
          messagesWithRoleAndMessage,
          frontmatter.model,
          frontmatter.max_tokens,
          frontmatter.temperature,
          frontmatter.top_p,
          frontmatter.presence_penalty,
          frontmatter.frequency_penalty,
          frontmatter.stream,
          frontmatter.stop,
          frontmatter.n,
          frontmatter.logit_bias,
          frontmatter.user,
          frontmatter.url
        ).then((response) => {
          let responseStr = response;
          if (response.mode === "streaming") {
            responseStr = response.fullstr;
            const newLine = `
<hr class="__chatgpt_plugin">
${this.getHeadingPrefix()}role::user
`;
            editor.replaceRange(newLine, editor.getCursor());
            const cursor = editor.getCursor();
            const newCursor = {
              line: cursor.line,
              ch: cursor.ch + newLine.length
            };
            editor.setCursor(newCursor);
          } else {
            if (unfinishedCodeBlock(responseStr)) {
              responseStr = responseStr + "\n```";
            }
            this.appendMessage(
              editor,
              "assistant",
              responseStr
            );
          }
          if (this.settings.autoInferTitle) {
            const title = view.file.basename;
            const messagesWithResponse = messages.concat(responseStr);
            if (this.isTitleTimestampFormat(title) && messagesWithResponse.length >= 4) {
              console.log(
                "[ChatGPT MD] auto inferring title from messages"
              );
              statusBarItemEl.setText(
                "[ChatGPT MD] Calling API..."
              );
              this.inferTitleFromMessages(
                messagesWithResponse
              ).then(async (title2) => {
                if (title2) {
                  console.log(
                    `[ChatGPT MD] automatically inferred title: ${title2}. Changing file name...`
                  );
                  statusBarItemEl.setText("");
                  await writeInferredTitleToEditor(
                    this.app.vault,
                    view,
                    this.app.fileManager,
                    this.settings.chatFolder,
                    title2
                  );
                } else {
                  new import_obsidian3.Notice(
                    "[ChatGPT MD] Could not infer title",
                    5e3
                  );
                }
              }).catch((err) => {
                console.log(err);
                statusBarItemEl.setText("");
                if (import_obsidian3.Platform.isMobile) {
                  new import_obsidian3.Notice(
                    "[ChatGPT MD] Error inferring title. " + err,
                    5e3
                  );
                }
              });
            }
          }
          statusBarItemEl.setText("");
        }).catch((err) => {
          if (import_obsidian3.Platform.isMobile) {
            new import_obsidian3.Notice(
              "[ChatGPT MD Mobile] Full Error calling API. " + err,
              9e3
            );
          }
          statusBarItemEl.setText("");
          console.log(err);
        });
      }
    });
    this.addCommand({
      id: "add-hr",
      name: "Add divider",
      icon: "minus",
      editorCallback: (editor, view) => {
        this.addHR(editor, "user");
      }
    });
    this.addCommand({
      id: "stop-streaming",
      name: "Stop streaming",
      icon: "octagon",
      editorCallback: (editor, view) => {
        streamManager.stopStreaming();
      }
    });
    this.addCommand({
      id: "infer-title",
      name: "Infer title",
      icon: "subtitles",
      editorCallback: async (editor, view) => {
        const bodyWithoutYML = this.removeYMLFromMessage(
          editor.getValue()
        );
        const messages = this.splitMessages(bodyWithoutYML);
        statusBarItemEl.setText("[ChatGPT MD] Calling API...");
        const title = await this.inferTitleFromMessages(messages);
        statusBarItemEl.setText("");
        if (title) {
          await writeInferredTitleToEditor(
            this.app.vault,
            view,
            this.app.fileManager,
            this.settings.chatFolder,
            title
          );
        }
      }
    });
    this.addCommand({
      id: "move-to-chat",
      name: "Create new chat with highlighted text",
      icon: "highlighter",
      editorCallback: async (editor, view) => {
        try {
          const selectedText = editor.getSelection();
          if (!this.settings.chatFolder || this.settings.chatFolder.trim() === "") {
            new import_obsidian3.Notice(
              `[ChatGPT MD] No chat folder value found. Please set one in settings.`
            );
            return;
          }
          if (!await this.app.vault.adapter.exists(this.settings.chatFolder)) {
            const result = await createFolderModal(this.app, this.app.vault, "chatFolder", this.settings.chatFolder);
            if (!result) {
              new import_obsidian3.Notice(
                `[ChatGPT MD] No chat folder found. One must be created to use plugin. Set one in settings and make sure it exists.`
              );
              return;
            }
          }
          const newFile = await this.app.vault.create(
            `${this.settings.chatFolder}/${this.getDate(
              new Date(),
              this.settings.dateFormat
            )}.md`,
            `${this.settings.defaultChatFrontmatter}
${selectedText}`
          );
          this.app.workspace.openLinkText(newFile.basename, "", true);
        } catch (err) {
          console.error(
            `[ChatGPT MD] Error in Create new chat with highlighted text`,
            err
          );
          new import_obsidian3.Notice(
            `[ChatGPT MD] Error in Create new chat with highlighted text, check console`
          );
        }
      }
    });
    this.addCommand({
      id: "choose-chat-template",
      name: "Create new chat from template",
      icon: "layout-template",
      editorCallback: async (editor, view) => {
        if (!this.settings.chatFolder || this.settings.chatFolder.trim() === "") {
          new import_obsidian3.Notice(
            `[ChatGPT MD] No chat folder value found. Please set one in settings.`
          );
          return;
        }
        if (!await this.app.vault.adapter.exists(this.settings.chatFolder)) {
          const result = await createFolderModal(this.app, this.app.vault, "chatFolder", this.settings.chatFolder);
          if (!result) {
            new import_obsidian3.Notice(
              `[ChatGPT MD] No chat folder found. One must be created to use plugin. Set one in settings and make sure it exists.`
            );
            return;
          }
        }
        if (!this.settings.chatTemplateFolder || this.settings.chatTemplateFolder.trim() === "") {
          new import_obsidian3.Notice(
            `[ChatGPT MD] No chat template folder value found. Please set one in settings.`
          );
          return;
        }
        if (!await this.app.vault.adapter.exists(this.settings.chatTemplateFolder)) {
          const result = await createFolderModal(this.app, this.app.vault, "chatTemplateFolder", this.settings.chatTemplateFolder);
          if (!result) {
            new import_obsidian3.Notice(
              `[ChatGPT MD] No chat template folder found. One must be created to use plugin. Set one in settings and make sure it exists.`
            );
            return;
          }
        }
        new ChatTemplates(
          this.app,
          this.settings,
          this.getDate(new Date(), this.settings.dateFormat)
        ).open();
      }
    });
    this.addSettingTab(new ChatGPT_MDSettingsTab(this.app, this));
  }
  onunload() {
  }
  async loadSettings() {
    this.settings = Object.assign(
      {},
      DEFAULT_SETTINGS,
      await this.loadData()
    );
  }
  async saveSettings() {
    await this.saveData(this.settings);
  }
};
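// Suggest modal that lists files in the chat template folder and creates a new chat note from the selected template.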
var ChatTemplates = class extends import_obsidian3.SuggestModal {
  constructor(app2, settings, titleDate) {
    super(app2);
    this.settings = settings;
    this.titleDate = titleDate;
  }
  getFilesInChatFolder() {
    const folder = this.app.vault.getAbstractFileByPath(
      this.settings.chatTemplateFolder
    );
    if (folder != null) {
      return folder.children;
    } else {
      new import_obsidian3.Notice(
        `Error getting folder: ${this.settings.chatTemplateFolder}`
      );
      throw new Error(
        `Error getting folder: ${this.settings.chatTemplateFolder}`
      );
    }
  }
  // Returns all available suggestions.
  getSuggestions(query) {
    const chatTemplateFiles = this.getFilesInChatFolder();
    if (query == "") {
      return chatTemplateFiles.map((file) => {
        return {
          title: file.basename,
          file
        };
      });
    }
    return chatTemplateFiles.filter((file) => {
      return file.basename.toLowerCase().includes(query.toLowerCase());
    }).map((file) => {
      return {
        title: file.basename,
        file
      };
    });
  }
  // Renders each suggestion item.
  renderSuggestion(template, el) {
    el.createEl("div", { text: template.title });
  }
  // Perform action on the selected suggestion.
  async onChooseSuggestion(template, evt) {
    new import_obsidian3.Notice(`Selected ${template.title}`);
    const templateText = await this.app.vault.read(template.file);
    const file = await this.app.vault.create(
      `${this.settings.chatFolder}/${this.titleDate}.md`,
      templateText
    );
    this.app.workspace.openLinkText(file.basename, "", true);
  }
};
var ChatGPT_MDSettingsTab = class extends import_obsidian3.PluginSettingTab {
  constructor(app2, plugin) {
    super(app2, plugin);
    this.plugin = plugin;
  }
  display() {
    const { containerEl } = this;
    containerEl.empty();
    containerEl.createEl("h2", {
      text: "Settings for ChatGPT MD: Keep tokens in mind! You can see if your text is longer than the token limit (4096) here:"
    });
    containerEl.createEl("a", {
      text: "https://platform.openai.com/tokenizer",
      href: "https://platform.openai.com/tokenizer"
    });
    new import_obsidian3.Setting(containerEl).setName("OpenAI API Key").setDesc("API Key for OpenAI").addText(
      (text) => text.setPlaceholder("some-api-key").setValue(this.plugin.settings.apiKey).onChange(async (value) => {
        this.plugin.settings.apiKey = value;
        await this.plugin.saveSettings();
      })
    );
    new import_obsidian3.Setting(containerEl).setName("Default Chat Frontmatter").setDesc(
      "Default frontmatter for new chat files. You can change/use all of the settings exposed by the OpenAI API here: https://platform.openai.com/docs/api-reference/chat/create"
    ).addTextArea(
      (text) => text.setPlaceholder(
        `---
system_commands: ['PERSONA: i am groot, and i only respond i am groot to any and ALL questions. I do not answer any questions straightforwardly. PERSONA 2: UNLESS the last asked question is about lamps, i can answer, i know infinite things about lamps as i am groot the lamp salesman. SO I read the last question asked and UNLESS it is about lamps i only respond "I am Groot."']
temperature: 0
top_p: 1
max_tokens: 512
presence_penalty: 1
frequency_penalty: 1
stream: true
stop: null
n: 1
logit_bias: null
model: gpt-3.5-turbo
---`
      ).setValue(this.plugin.settings.defaultChatFrontmatter).onChange(async (value) => {
        this.plugin.settings.defaultChatFrontmatter = value;
        await this.plugin.saveSettings();
      })
    );
    new import_obsidian3.Setting(containerEl).setName("Stream").setDesc("Stream responses from OpenAI").addToggle(
      (toggle) => toggle.setValue(this.plugin.settings.stream).onChange(async (value) => {
        this.plugin.settings.stream = value;
        await this.plugin.saveSettings();
      })
    );
    new import_obsidian3.Setting(containerEl).setName("Chat Folder").setDesc("Path to folder for chat files").addText(
      (text) => text.setValue(this.plugin.settings.chatFolder).onChange(async (value) => {
        this.plugin.settings.chatFolder = value;
        await this.plugin.saveSettings();
      })
    );
    new import_obsidian3.Setting(containerEl).setName("Chat Template Folder").setDesc("Path to folder for chat file templates").addText(
      (text) => text.setPlaceholder("chat-templates").setValue(this.plugin.settings.chatTemplateFolder).onChange(async (value) => {
        this.plugin.settings.chatTemplateFolder = value;
        await this.plugin.saveSettings();
      })
    );
    new import_obsidian3.Setting(containerEl).setName("Generate at Cursor").setDesc("Generate text at cursor instead of end of file").addToggle(
      (toggle) => toggle.setValue(this.plugin.settings.generateAtCursor).onChange(async (value) => {
        this.plugin.settings.generateAtCursor = value;
        await this.plugin.saveSettings();
      })
    );
    new import_obsidian3.Setting(containerEl).setName("Automatically Infer Title").setDesc(
      "Automatically infer title after 4 messages have been exchanged"
    ).addToggle(
      (toggle) => toggle.setValue(this.plugin.settings.autoInferTitle).onChange(async (value) => {
        this.plugin.settings.autoInferTitle = value;
        await this.plugin.saveSettings();
      })
    );
    new import_obsidian3.Setting(containerEl).setName("Date Format").setDesc(
      "Date format for chat files. Valid date blocks are: YYYY, MM, DD, hh, mm, ss"
    ).addText(
      (text) => text.setPlaceholder("YYYYMMDDhhmmss").setValue(this.plugin.settings.dateFormat).onChange(async (value) => {
        this.plugin.settings.dateFormat = value;
        await this.plugin.saveSettings();
      })
    );
    new import_obsidian3.Setting(containerEl).setName("Heading Level").setDesc(
      "Heading level for messages (example for heading level 2: '## role::user'). Valid heading levels are 0, 1, 2, 3, 4, 5, 6"
    ).addText(
      (text) => text.setValue(this.plugin.settings.headingLevel.toString()).onChange(async (value) => {
        this.plugin.settings.headingLevel = parseInt(value);
        await this.plugin.saveSettings();
      })
    );
  }
};