diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 86ea69de99db88512605e6b5d05973f909099d4a..4f2d135eb435a569f53dfd76f21df487ce9b7ecd 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -6,31 +6,40 @@ stages:
 #  - archive
 #  - deploy
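+
+# The anchor below precompiles the bundled Core ML .mlpackage models and generates
+# their Swift wrapper classes before `swift build` / `swift test`; the generated
+# sources are moved into "2. Encoding" so they compile as part of the package.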
 
+.compile_models: &compile_models
+  - xcrun coremlcompiler compile Sources/SwiftNLP/Resources/all-MiniLM-L6-v2.mlpackage/ Sources/SwiftNLP/Models
+  - xcrun coremlcompiler generate Sources/SwiftNLP/Resources/all-MiniLM-L6-v2.mlpackage/ --language Swift Sources/SwiftNLP/Resources
+  - mv Sources/SwiftNLP/Resources/all-MiniLM-L6-v2.swift Sources/SwiftNLP/2.\ Encoding
+  - xcrun coremlcompiler compile Sources/SwiftNLP/Resources/float32_model.mlpackage/ Sources/SwiftNLP/Models
+  - xcrun coremlcompiler generate Sources/SwiftNLP/Resources/float32_model.mlpackage/ --language Swift Sources/SwiftNLP/Resources
+  - mv Sources/SwiftNLP/Resources/float32_model.swift Sources/SwiftNLP/2.\ Encoding
+
 build-macOS:
   stage: build
 
   script:
-    - swift build -c release -Xswiftc -cross-module-optimization -Xlinker -framework -Xlinker CoreML
+    - *compile_models
+    - swift build
   
   tags:
     - macOS
 
 test-macOS:
   stage: test
+
   script:
-     - export SKIP_TESTS=DurableHNSWCorpusTests
-     - swift test -c release -Xswiftc -enable-testing
-#    - swift test --sanitize=address -c release -Xswiftc -enable-testing
-#    - swift test --sanitize=thread -c release -Xswiftc -enable-testing
+    - *compile_models
+    - swift test -c release -Xswiftc -enable-testing
+
   tags:
     - macOS
 
-
 build-linux:
   stage: build
 
   script:
     - swiftly install latest
+    - *compile_models
     - swift build -c release -Xswiftc -cross-module-optimization
   tags:
     - linux
@@ -39,8 +48,10 @@ build-linux:
 
 test-linux:
   stage: test
+
   script:
-     - swift test -c release -Xswiftc -enable-testing
+    - *compile_models
+    - swift test -c release -Xswiftc -enable-testing
 #    - swift test --sanitize=address -c release -Xswiftc -enable-testing
 #    - swift test --sanitize=thread -c release -Xswiftc -enable-testing
   tags:
diff --git a/Package.resolved b/Package.resolved
index 5f2e68ff947b10697835521b7d4554ca87972db2..a7b19d4ff9daa92c318f7b28b6bd8dc35c71d647 100644
--- a/Package.resolved
+++ b/Package.resolved
@@ -5,7 +5,7 @@
       "kind" : "remoteSourceControl",
       "location" : "https://github.com/tannerdsilva/CLMDB.git",
       "state" : {
-        "revision" : "39e2f317c898824777365d5669f6fa7571541893",
+        "revision" : "30d45263c8e512b01ab073e77ebff09039bdc593",
         "version" : "0.9.31"
       }
     },
@@ -27,6 +27,15 @@
         "version" : "2.2.0"
       }
     },
+    {
+      "identity" : "faissmobile",
+      "kind" : "remoteSourceControl",
+      "location" : "https://github.com/jkrukowski/FaissMobile",
+      "state" : {
+        "revision" : "9d5b9925305eea9398cc92ce4a8e51c8a4b043af",
+        "version" : "0.0.1"
+      }
+    },
     {
       "identity" : "similarity-topology",
       "kind" : "remoteSourceControl",
@@ -45,6 +54,24 @@
         "version" : "2.3.2"
       }
     },
+    {
+      "identity" : "swift-argument-parser",
+      "kind" : "remoteSourceControl",
+      "location" : "https://github.com/apple/swift-argument-parser",
+      "state" : {
+        "revision" : "c8ed701b513cf5177118a175d85fbbbcd707ab41",
+        "version" : "1.3.0"
+      }
+    },
+    {
+      "identity" : "swift-log",
+      "kind" : "remoteSourceControl",
+      "location" : "https://github.com/apple/swift-log",
+      "state" : {
+        "revision" : "e97a6fcb1ab07462881ac165fdbb37f067e205d5",
+        "version" : "1.5.4"
+      }
+    },
     {
       "identity" : "swift-numerics",
       "kind" : "remoteSourceControl",
@@ -62,6 +89,42 @@
         "revision" : "0ce7065dd10a2936179220283f8bb039482d237f",
         "version" : "0.5.0"
       }
+    },
+    {
+      "identity" : "swift-syntax",
+      "kind" : "remoteSourceControl",
+      "location" : "https://github.com/apple/swift-syntax.git",
+      "state" : {
+        "revision" : "64889f0c732f210a935a0ad7cda38f77f876262d",
+        "version" : "509.1.1"
+      }
+    },
+    {
+      "identity" : "swiftannoy",
+      "kind" : "remoteSourceControl",
+      "location" : "https://github.com/jbadger3/SwiftAnnoy",
+      "state" : {
+        "revision" : "6d90636e22510c2f0798f9f8ff072109e345750a",
+        "version" : "1.1.0"
+      }
+    },
+    {
+      "identity" : "swiftfaiss",
+      "kind" : "remoteSourceControl",
+      "location" : "https://github.com/jkrukowski/SwiftFaiss.git",
+      "state" : {
+        "revision" : "d3831c1e9898695ae7f680b6353e48e873d3f1d3",
+        "version" : "0.0.8"
+      }
+    },
+    {
+      "identity" : "swiftformat",
+      "kind" : "remoteSourceControl",
+      "location" : "https://github.com/nicklockwood/SwiftFormat",
+      "state" : {
+        "revision" : "dbc9a4406d21cc52f16caf1e299172b097145e5e",
+        "version" : "0.53.3"
+      }
     }
   ],
   "version" : 2
diff --git a/Package.swift b/Package.swift
index f4d6ea7dfc780051cecebc9ea00e172acec62904..a65eec373940f89f8f584736eb1cf2cdbd6080ad 100644
--- a/Package.swift
+++ b/Package.swift
@@ -2,11 +2,12 @@
 // The swift-tools-version declares the minimum version of Swift required to build this package.
 
 import PackageDescription
+import CompilerPluginSupport
 
 let package = Package(
     name: "SwiftNLP",
     platforms: [
-        .macOS(.v13),
+        .macOS(.v13)
     ],
     products: [
         .library(
@@ -20,15 +21,26 @@ let package = Package(
 //        ),
     ],
     dependencies: [
+        .package(url: "https://github.com/jbadger3/SwiftAnnoy", .upToNextMajor(from: "1.0.1")),
+        .package(url: "https://github.com/jkrukowski/SwiftFaiss.git", from: "0.0.7"),
         .package(url: "https://github.com/L1MeN9Yu/Elva", .upToNextMajor(from: "2.1.3")),
-        .package(url: "https://github.com/JadenGeller/similarity-topology", .exact("0.1.14")),
+        .package(url: "https://github.com/JadenGeller/similarity-topology", .upToNextMajor(from: "0.1.14")),
+        .package(url: "https://github.com/apple/swift-syntax", from: "509.0.0"),
         .package(url: "https://github.com/Jounce/Surge.git", .upToNextMajor(from: "2.0.0")),
-//        .package(url: "https://github.com/mingchungx/nifty.git", .branch("master"))
     ],
     targets: [
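+        // Compiler plug-in target that implements the #MODEL_* freestanding macros
+        // used by CoreMLEncoder.swift and GenericModel.swift in "2. Encoding".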
+        .macro(
+            name: "SwiftNLPGenericLLMMacros",
+            dependencies: [
+                .product(name: "SwiftSyntaxMacros", package: "swift-syntax"),
+                .product(name: "SwiftCompilerPlugin", package: "swift-syntax")
+            ]
+        ),
         .target(
             name: "SwiftNLP",
             dependencies: [
+                "SwiftAnnoy",
+                "SwiftNLPGenericLLMMacros",
                 .product(name: "HNSWAlgorithm", package: "similarity-topology"),
                 .product(name: "HNSWEphemeral", package: "similarity-topology"),
                 .product(name: "HNSWDurable", package: "similarity-topology", condition: .when(platforms: [.macOS])),
@@ -37,17 +49,16 @@ let package = Package(
                 .product(name: "ZSTD", package: "Elva"),
                 .byName(name: "Surge", condition: .when(platforms: [.macOS])),
             ],
-            resources: [.process("Resources")]
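+            // Only the tokenizer vocabulary and the GloVe mmap are bundled as resources;
+            // the .mlpackage models are compiled by coremlcompiler in CI (see .gitlab-ci.yml)
+            // rather than processed here.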
+            resources: [
+                .process("Resources/bert_vocab.txt"),
+                .process("Resources/glove.6B.50d.mmap"),
+            ]
         ),
         .testTarget(
             name: "SwiftNLPTests",
-            dependencies: ["SwiftNLP"],
-            resources: [.process("Resources")]
-        ),
-        /// This is commented out to fix the gitlab pipeline, but must be uncommented when in use on macOS only.
-//        .executableTarget(
-//            name: "SwiftNLPVisualizer",
-//            dependencies: ["SwiftNLP"]
-//        ),
+            dependencies: ["SwiftNLPGenericLLMMacros", "SwiftNLP"],
+            resources: [
+                .process("Resources"),
+            ]),
     ]
 )
diff --git a/Sources/SwiftNLP/2. Encoding/CoreMLEncoder.swift b/Sources/SwiftNLP/2. Encoding/CoreMLEncoder.swift
index 96800d39dc9ba6d4fe00a8e16760e060e903a2f0..b98aa739a03e46ac6527f2dec4439309798ffedd 100644
--- a/Sources/SwiftNLP/2. Encoding/CoreMLEncoder.swift	
+++ b/Sources/SwiftNLP/2. Encoding/CoreMLEncoder.swift	
@@ -26,69 +26,99 @@ import Foundation
 import CoreML
 
 
-struct CoreMLEncoder<Scalar: BinaryFloatingPoint>: SNLPEncoder {
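+/// Expands to the per-model prediction dispatch used by `generateEmbeddings` below;
+/// judging from the call site, the arguments are the *names* of the input-ids,
+/// attention-mask, and output variables in the enclosing scope.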
+@freestanding(expression)
+public macro MODEL_MAKE_PREDICTION(_ input_name: Any, _ attention_ids: Any, _ output_name: Any) = #externalMacro(
+    module: "SwiftNLPGenericLLMMacros",
+    type: "LLMModelPredictionCases")
+
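+/// Expands to a check that `self.model` names a supported model and sets
+/// `self.inputDimention` to that model's input length (see `LLMEmbeddings.init`).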
+@freestanding(expression)
+public macro MODEL_VALIDATE_NAME_AND_SET_INPUT_SIZE() = #externalMacro(
+    module: "SwiftNLPGenericLLMMacros",
+    type: "LLMModelNameValidation")
+
+
+class CoreMLEncoder<Scalar: BinaryFloatingPoint & Codable>: SNLPEncoder {
+    
+    
     
-    var zeroes: [Scalar] = []
-    var dimensions: UInt = 0
+    var zeroes: [Scalar]
+    var dimensions: UInt
+    var model: String
         
+    required init() {
+      zeroes = Array(repeating: Scalar(0), count: 384)
+      dimensions = 384
+      model = "all_MiniLM_L6_v2"
+    }
+    
+    
     func encodeToken(_ token: String) -> [Scalar] {
-        fatalError("CoreMLEncoder not implemented yet. Get on it.")
+        // tokenizeToIds produces integer token ids; convert them element-wise into Scalar
+        // (a force-cast of the returned array to [Scalar] would trap at runtime).
+        return LLMEmbeddings(model_type: self.model).tokenizer.tokenizeToIds(text: token).map { Scalar($0) }
     }
     
     func encodeSentence(_ sentence: String) -> [Scalar] {
-        fatalError("CoreMLEncoder not implemented yet. Get on it.")
+        // The tokenizer and Core ML prediction are synchronous, so the embedding
+        // pipeline can run inline in this synchronous protocol requirement.
+        let llm = LLMEmbeddings(model_type: self.model)
+        let tokens = llm.tokenizer.buildModelTokens(sentence: sentence)
+        let (inputIds, attentionMask) = llm.tokenizer.buildModelInputs(from: tokens)
+        let embeddings = llm.generateEmbeddings(inputIds: inputIds, attentionMask: attentionMask) ?? []
+        return embeddings.map { Scalar($0) }
     }
 }
 
-//@available(macOS 13.0, *)
-//public class MiniLMEmbeddings {
-//    public let model: all_MiniLM_L6_v2
-//    public let tokenizer: BertTokenizer
-//    public let inputDimention: Int = 512
-//    public let outputDimention: Int = 384
-//
-//    public init() {
-//        let modelConfig = MLModelConfiguration()
-//        modelConfig.computeUnits = .all
-//
-//        do {
-//            self.model = try all_MiniLM_L6_v2(configuration: modelConfig)
-//        } catch {
-//            fatalError("Failed to load the Core ML model. Error: \(error.localizedDescription)")
-//        }
-//
-//        self.tokenizer = BertTokenizer()
-//    }
-//
-//    // MARK: - Dense Embeddings
-//
-//    public func encode(sentence: String) async -> [Float]? {
-//        // Encode input text as bert tokens
-//        let inputTokens = tokenizer.buildModelTokens(sentence: sentence)
-//        let (inputIds, attentionMask) = tokenizer.buildModelInputs(from: inputTokens)
-//
-//        // Send tokens through the MLModel
-//        let embeddings = generateEmbeddings(inputIds: inputIds, attentionMask: attentionMask)
-//
-//        return embeddings
-//    }
-//
-//    public func generateEmbeddings(inputIds: MLMultiArray, attentionMask: MLMultiArray) -> [Float]? {
-//        let inputFeatures = all_MiniLM_L6_v2Input(input_ids: inputIds, attention_mask: attentionMask)
-//        
-//        let output = try? model.prediction(input: inputFeatures)
-//        guard let embeddings = output?.embeddings else {
-//            return nil
-//        }
-//        
-//        var embeddingsArray = [Float]()
-//        for index in 0..<embeddings.count {
-//            let value = embeddings[index].floatValue
-//            embeddingsArray.append(Float(value))
-//        }
-//        
-//        return embeddingsArray
-//    }
-//
-//}
+
+@available(macOS 13.0, *)
+public class LLMEmbeddings {
+    
+    private let model: String
+    public var tokenizer: BertTokenizer
+    public var inputDimention: Int = 512 // 512 is a dummy value, correct value is set by the macro below
+    public let outputDimention: Int = 384
+
+    public init(model_type: String) {
+        let modelConfig = MLModelConfiguration()
+        modelConfig.computeUnits = .all
+
+        self.model = model_type
+        
+        // dummy initialization needed here to avoid compilation error
+        self.tokenizer = BertTokenizer(maxLen: self.inputDimention)
+        
+        // validate the model type is valid and set the correct input dimension
+        #MODEL_VALIDATE_NAME_AND_SET_INPUT_SIZE()
+        
+        // reinitialize with correct input size
+        self.tokenizer = BertTokenizer(maxLen: self.inputDimention)
+    }
+
+    public func encode(sentence: String) async -> [Float]? {
+        // Encode input text as bert tokens
+        let inputTokens = tokenizer.buildModelTokens(sentence: sentence)
+        let (inputIds, attentionMask) = tokenizer.buildModelInputs(from: inputTokens)
+
+        let embeddings = generateEmbeddings(inputIds: inputIds, attentionMask: attentionMask)
+
+        return embeddings
+    }
+
+    public func generateEmbeddings(inputIds: MLMultiArray, attentionMask: MLMultiArray) -> [Float]? {
+        var output: MLMultiArray? = nil
+
+        // determine which model to use and generate predictions
+        #MODEL_MAKE_PREDICTION("inputIds", "attentionMask", "output")
+
+        guard let embeddings = output else {
+            return nil
+        }
+
+        var embeddingsArray = [Float]()
+        for index in 0..<embeddings.count {
+            embeddingsArray.append(embeddings[index].floatValue)
+        }
+
+        return embeddingsArray
+    }
+}
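+
+// Example usage (hypothetical call site, for illustration only):
+//
+//     let embedder = LLMEmbeddings(model_type: "all_MiniLM_L6_v2")
+//     let vector = await embedder.encode(sentence: "SwiftNLP runs embeddings on-device")
+//     // `vector` is a 384-element [Float]? for this model (see outputDimention above)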
 #endif
diff --git a/Sources/SwiftNLP/2. Encoding/GenericModel.swift b/Sources/SwiftNLP/2. Encoding/GenericModel.swift
new file mode 100644
index 0000000000000000000000000000000000000000..65ded46bfabfa29aabbe6d6f9b783f0fbd1c3d37
--- /dev/null
+++ b/Sources/SwiftNLP/2. Encoding/GenericModel.swift	
@@ -0,0 +1,12 @@
+import CoreML
+
+
+@freestanding(declaration, names: arbitrary)
+public macro MODEL_PREDICTION_FUNCTIONS() = #externalMacro(
+    module: "SwiftNLPGenericLLMMacros",
+    type: "LLMPredictionFunctions")
+
+
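+// LLMModel is a thin container type: the macro below is expected to declare one
+// prediction helper per bundled Core ML model (hence `names: arbitrary`) for the
+// #MODEL_MAKE_PREDICTION expansion to dispatch to.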
+struct LLMModel {
+    #MODEL_PREDICTION_FUNCTIONS()
+}
diff --git a/Sources/SwiftNLP/2. Encoding/Tokenizers/BertTokenizer.swift b/Sources/SwiftNLP/2. Encoding/Tokenizers/BertTokenizer.swift
index 376cdff5220a49f7a57b60a71979f49db5ae57d2..8c1e01d38fa4b0688eb8cc86c73e2362c0b8f472 100644
--- a/Sources/SwiftNLP/2. Encoding/Tokenizers/BertTokenizer.swift	
+++ b/Sources/SwiftNLP/2. Encoding/Tokenizers/BertTokenizer.swift	
@@ -6,12 +6,12 @@ import CoreML
 public class BertTokenizer {
     private let basicTokenizer = BasicTokenizer()
     private let wordpieceTokenizer: WordpieceTokenizer
-    private let maxLen = 512
+    private var maxLen = 512
 
     private let vocab: [String: Int]
     private let ids_to_tokens: [Int: String]
 
-    public init() {
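+    /// - Parameter maxLen: maximum input length of the target model (e.g. 512);
+    ///   `LLMEmbeddings` passes its `inputDimention` here.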
+    public init(maxLen: Int) {
         let url = Bundle.module.url(forResource: "bert_vocab", withExtension: "txt")!
         let vocabTxt = try! String(contentsOf: url)
         let tokens = vocabTxt.split(separator: "\n").map { String($0) }
@@ -24,6 +24,7 @@ public class BertTokenizer {
         self.vocab = vocab
         self.ids_to_tokens = ids_to_tokens
         self.wordpieceTokenizer = WordpieceTokenizer(vocab: self.vocab)
+        self.maxLen = maxLen
     }
 
     public func buildModelTokens(sentence: String) -> [Int] {
diff --git a/Sources/SwiftNLP/Resources/float32_model.mlpackage/Data/com.apple.CoreML/model.mlmodel b/Sources/SwiftNLP/Resources/float32_model.mlpackage/Data/com.apple.CoreML/model.mlmodel
new file mode 100644
index 0000000000000000000000000000000000000000..a23498c8857990e3a536ab50919b8358fe2fd143
Binary files /dev/null and b/Sources/SwiftNLP/Resources/float32_model.mlpackage/Data/com.apple.CoreML/model.mlmodel differ
(11,270-line binary Core ML protobuf omitted: thenlper/gte-small feature-extraction export, BertModel architecture, float32 precision, produced with coremltools 7.1 from torch 2.1.0 / transformers 4.28.1)
+x
+
+
+linear_7
+shape
+	
+var_183'
+x_15
+
+
+€
+
+ *
+name
+
+
+"
+x_15á
+linear
+x
+
+
+input_31?
+weight5
+3
+1model_encoder_layer_1_attention_self_value_weight;
+bias3
+1
+/model_encoder_layer_1_attention_self_value_bias&
+linear_8
+
+
+€
+€*
+name
+
+"
+
+linear_8b
+const
+var_192
+
+
+*
+name
+
+
+"
+op_192*"
+val
+
+
+
+	
+€ w
+reshape
+x
+
+
+linear_8
+shape
+	
+var_192'
+x_19
+
+
+€
+
+ *
+name
+
+
+"
+x_19a
+const
+var_194
+
+
+*
+name
+
+
+"
+op_194*!
+val
+
+
+
+
+
+�b
+const
+var_198
+
+
+*
+name
+
+
+"
+op_198*"
+val
+
+
+
+	
+€ w
+reshape
+x
+
+
+linear_6
+shape
+	
+var_198'
+x_23
+
+
+€
+
+ *
+name
+
+
+"
+x_23
+const(
+ attention_scores_5_transpose_x_0
+*6
+name.
+&
+$""
+ attention_scores_5_transpose_x_0*
+val
+
+
+�
+const(
+ attention_scores_5_transpose_y_0
+*6
+name.
+&
+$""
+ attention_scores_5_transpose_y_0*
+val
+
+
+�z
+const#
+transpose_38_perm_0
+
+
+*)
+name!
+
+"
+transpose_38_perm_0*!
+val
+
+
+
+
+
+�z
+const#
+transpose_39_perm_0
+
+
+*)
+name!
+
+"
+transpose_39_perm_0*!
+val
+
+
+
+
+
+�’
+	transpose
+x
+
+x_15
+perm
+
+transpose_39_perm_00
+
transpose_101
+
+
+
+ 
+€*#
+name
+
+"
+
transpose_101’
+	transpose
+x
+
+x_23
+perm
+
+transpose_38_perm_00
+
transpose_102
+
+
+
+€
+ *#
+name
+
+"
+
transpose_102„
+matmul
+x
+
+
transpose_102
+y
+
+
transpose_1013
+transpose_x$
+"
+ attention_scores_5_transpose_x_03
+transpose_y$
+"
+ attention_scores_5_transpose_y_06
+attention_scores_5 
+
+
+
+€
+€*(
+name 
+
+"
+attention_scores_5„
+const(
+ _inversed_attention_scores_7_y_0
+*6
+name.
+&
+$""
+ _inversed_attention_scores_7_y_0*
+val
+
+
+
+
+ó5>Ã
+mul
+x
+
+attention_scores_5)
+y$
+"
+ _inversed_attention_scores_7_y_0@
+_inversed_attention_scores_7 
+
+
+
+€
+€*2
+name*
+"
+ "
+_inversed_attention_scores_7•
+add%
+x 
+
+_inversed_attention_scores_7
+y
+
+attention_mask_1,
+input_33 
+
+
+
+€
+€*
+name
+
+"
+
+input_33}
+softmax
+x
+
+
+input_33
+axis	
+
+var_8,
+input_35 
+
+
+
+€
+€*
+name
+
+"
+
+input_35{
+const%
+context_layer_5_transpose_x_0
+*3
+name+
+#
+!"
+context_layer_5_transpose_x_0*
+val
+
+
+�{
+const%
+context_layer_5_transpose_y_0
+*3
+name+
+#
+!"
+context_layer_5_transpose_y_0*
+val
+
+
+�†
+	transpose
+x
+
+x_19
+perm
+	
+var_1940
+
transpose_103
+
+
+
+€
+ *#
+name
+
+"
+
transpose_103ò
+matmul
+x
+
+
+input_35
+y
+
+
transpose_1030
+transpose_x!
+
+context_layer_5_transpose_x_00
+transpose_y!
+
+context_layer_5_transpose_y_02
+context_layer_5
+
+
+
+€
+ *%
+name
+
+"
+context_layer_5a
+const
+var_210
+
+
+*
+name
+
+
+"
+op_210*!
+val
+
+
+
+
+
+�b
+const
+var_215
+
+
+*
+name
+
+
+"
+op_215*"
+val
+
+
+
+	
+€€‘
+	transpose
+x
+
+context_layer_5
+perm
+	
+var_2100
+
transpose_100
+
+
+€
+
+ *#
+name
+
+"
+
transpose_100
+reshape
+x
+
+
transpose_100
+shape
+	
+var_215&
+input_37
+
+
+€
+€*
+name
+
+"
+
+input_37å
+linear
+x
+
+
+input_37A
+weight7
+5
+3model_encoder_layer_1_attention_output_dense_weight=
+bias5
+3
+1model_encoder_layer_1_attention_output_dense_bias&
+linear_9
+
+
+€
+€*
+name
+
+"
+
+linear_9s
+add
+x
+
+
+linear_9
+y
+
+
+input_31&
+input_41
+
+
+€
+€*
+name
+
+"
+
+input_41x
+const
+input_43_axes_0
+
+
+*%
+name
+
+"
+input_43_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ¤
+
+layer_norm
+x
+
+
+input_41
+axes
+
+input_43_axes_0D
+gamma;
+9
+7model_encoder_layer_1_attention_output_LayerNorm_weightA
+beta9
+7
+5model_encoder_layer_1_attention_output_LayerNorm_bias
+epsilon
+
+
+var_10&
+input_43
+
+
+€
+€*
+name
+
+"
+
+input_43ß
+linear
+x
+
+
+input_43=
+weight3
+1
+/model_encoder_layer_1_intermediate_dense_weight9
+bias1
+/
+-model_encoder_layer_1_intermediate_dense_bias'
+	linear_10
+
+
+€
+€*
+name
+
+
"
+	linear_10c
+const
+input_47_mode_0
+*%
+name
+
+"
+input_47_mode_0*
+val
+
+	"
+EXACT
+gelu
+x
+
+	linear_10
+mode
+
+input_47_mode_0&
+input_47
+
+
+€
+€*
+name
+
+"
+
+input_47Ó
+linear
+x
+
+
+input_477
+weight-
++
+)model_encoder_layer_1_output_dense_weight3
+bias+
+)
+'model_encoder_layer_1_output_dense_bias'
+	linear_11
+
+
+€
+€*
+name
+
+
"
+	linear_11t
+add
+x
+
+	linear_11
+y
+
+
+input_43&
+input_51
+
+
+€
+€*
+name
+
+"
+
+input_51x
+const
+input_53_axes_0
+
+
+*%
+name
+
+"
+input_53_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ
+
+layer_norm
+x
+
+
+input_51
+axes
+
+input_53_axes_0:
+gamma1
+/
+-model_encoder_layer_1_output_LayerNorm_weight7
+beta/
+-
++model_encoder_layer_1_output_LayerNorm_bias
+epsilon
+
+
+var_10&
+input_53
+
+
+€
+€*
+name
+
+"
+
+input_53ã
+linear
+x
+
+
+input_53?
+weight5
+3
+1model_encoder_layer_2_attention_self_query_weight;
+bias3
+1
+/model_encoder_layer_2_attention_self_query_bias'
+	linear_12
+
+
+€
+€*
+name
+
+
"
+	linear_12ß
+linear
+x
+
+
+input_53=
+weight3
+1
+/model_encoder_layer_2_attention_self_key_weight9
+bias1
+/
+-model_encoder_layer_2_attention_self_key_bias'
+	linear_13
+
+
+€
+€*
+name
+
+
"
+	linear_13b
+const
+var_260
+
+
+*
+name
+
+
+"
+op_260*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_13
+shape
+	
+var_260'
+x_27
+
+
+€
+
+ *
+name
+
+
+"
+x_27ã
+linear
+x
+
+
+input_53?
+weight5
+3
+1model_encoder_layer_2_attention_self_value_weight;
+bias3
+1
+/model_encoder_layer_2_attention_self_value_bias'
+	linear_14
+
+
+€
+€*
+name
+
+
"
+	linear_14b
+const
+var_269
+
+
+*
+name
+
+
+"
+op_269*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_14
+shape
+	
+var_269'
+x_31
+
+
+€
+
+ *
+name
+
+
+"
+x_31a
+const
+var_271
+
+
+*
+name
+
+
+"
+op_271*!
+val
+
+
+
+
+
+�b
+const
+var_275
+
+
+*
+name
+
+
+"
+op_275*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_12
+shape
+	
+var_275'
+x_35
+
+
+€
+
+ *
+name
+
+
+"
+x_35
+const(
+ attention_scores_9_transpose_x_0
+*6
+name.
+&
+$""
+ attention_scores_9_transpose_x_0*
+val
+
+
+�
+const(
+ attention_scores_9_transpose_y_0
+*6
+name.
+&
+$""
+ attention_scores_9_transpose_y_0*
+val
+
+
+�z
+const#
+transpose_40_perm_0
+
+
+*)
+name!
+
+"
+transpose_40_perm_0*!
+val
+
+
+
+
+
+�z
+const#
+transpose_41_perm_0
+
+
+*)
+name!
+
+"
+transpose_41_perm_0*!
+val
+
+
+
+
+
+�
+	transpose
+x
+
+x_27
+perm
+
+transpose_41_perm_0/
+transpose_97
+
+
+
+ 
+€*"
+name
+
+"
+transpose_97
+	transpose
+x
+
+x_35
+perm
+
+transpose_40_perm_0/
+transpose_98
+
+
+
+€
+ *"
+name
+
+"
+transpose_98‚
+matmul
+x
+
+transpose_98
+y
+
+transpose_973
+transpose_x$
+"
+ attention_scores_9_transpose_x_03
+transpose_y$
+"
+ attention_scores_9_transpose_y_06
+attention_scores_9 
+
+
+
+€
+€*(
+name 
+
+"
+attention_scores_9†
+const)
+!_inversed_attention_scores_11_y_0
+*7
+name/
+'
+%"#
+!_inversed_attention_scores_11_y_0*
+val
+
+
+
+
+ó5>Æ
+mul
+x
+
+attention_scores_9*
+y%
+#
+!_inversed_attention_scores_11_y_0A
+_inversed_attention_scores_11 
+
+
+
+€
+€*3
+name+
+#
+!"
+_inversed_attention_scores_11–
+add&
+x!
+
+_inversed_attention_scores_11
+y
+
+attention_mask_1,
+input_55 
+
+
+
+€
+€*
+name
+
+"
+
+input_55}
+softmax
+x
+
+
+input_55
+axis	
+
+var_8,
+input_57 
+
+
+
+€
+€*
+name
+
+"
+
+input_57{
+const%
+context_layer_9_transpose_x_0
+*3
+name+
+#
+!"
+context_layer_9_transpose_x_0*
+val
+
+
+�{
+const%
+context_layer_9_transpose_y_0
+*3
+name+
+#
+!"
+context_layer_9_transpose_y_0*
+val
+
+
+�„
+	transpose
+x
+
+x_31
+perm
+	
+var_271/
+transpose_99
+
+
+
+€
+ *"
+name
+
+"
+transpose_99ñ
+matmul
+x
+
+
+input_57
+y
+
+transpose_990
+transpose_x!
+
+context_layer_9_transpose_x_00
+transpose_y!
+
+context_layer_9_transpose_y_02
+context_layer_9
+
+
+
+€
+ *%
+name
+
+"
+context_layer_9a
+const
+var_287
+
+
+*
+name
+
+
+"
+op_287*!
+val
+
+
+
+
+
+�b
+const
+var_292
+
+
+*
+name
+
+
+"
+op_292*"
+val
+
+
+
+	
+€€
+	transpose
+x
+
+context_layer_9
+perm
+	
+var_287/
+transpose_96
+
+
+€
+
+ *"
+name
+
+"
+transpose_96~
+reshape
+x
+
+transpose_96
+shape
+	
+var_292&
+input_59
+
+
+€
+€*
+name
+
+"
+
+input_59ç
+linear
+x
+
+
+input_59A
+weight7
+5
+3model_encoder_layer_2_attention_output_dense_weight=
+bias5
+3
+1model_encoder_layer_2_attention_output_dense_bias'
+	linear_15
+
+
+€
+€*
+name
+
+
"
+	linear_15t
+add
+x
+
+	linear_15
+y
+
+
+input_53&
+input_63
+
+
+€
+€*
+name
+
+"
+
+input_63x
+const
+input_65_axes_0
+
+
+*%
+name
+
+"
+input_65_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ¤
+
+layer_norm
+x
+
+
+input_63
+axes
+
+input_65_axes_0D
+gamma;
+9
+7model_encoder_layer_2_attention_output_LayerNorm_weightA
+beta9
+7
+5model_encoder_layer_2_attention_output_LayerNorm_bias
+epsilon
+
+
+var_10&
+input_65
+
+
+€
+€*
+name
+
+"
+
+input_65ß
+linear
+x
+
+
+input_65=
+weight3
+1
+/model_encoder_layer_2_intermediate_dense_weight9
+bias1
+/
+-model_encoder_layer_2_intermediate_dense_bias'
+	linear_16
+
+
+€
+€*
+name
+
+
"
+	linear_16c
+const
+input_69_mode_0
+*%
+name
+
+"
+input_69_mode_0*
+val
+
+	"
+EXACT
+gelu
+x
+
+	linear_16
+mode
+
+input_69_mode_0&
+input_69
+
+
+€
+€*
+name
+
+"
+
+input_69Ó
+linear
+x
+
+
+input_697
+weight-
++
+)model_encoder_layer_2_output_dense_weight3
+bias+
+)
+'model_encoder_layer_2_output_dense_bias'
+	linear_17
+
+
+€
+€*
+name
+
+
"
+	linear_17t
+add
+x
+
+	linear_17
+y
+
+
+input_65&
+input_73
+
+
+€
+€*
+name
+
+"
+
+input_73x
+const
+input_75_axes_0
+
+
+*%
+name
+
+"
+input_75_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ
+
+layer_norm
+x
+
+
+input_73
+axes
+
+input_75_axes_0:
+gamma1
+/
+-model_encoder_layer_2_output_LayerNorm_weight7
+beta/
+-
++model_encoder_layer_2_output_LayerNorm_bias
+epsilon
+
+
+var_10&
+input_75
+
+
+€
+€*
+name
+
+"
+
+input_75ã
+linear
+x
+
+
+input_75?
+weight5
+3
+1model_encoder_layer_3_attention_self_query_weight;
+bias3
+1
+/model_encoder_layer_3_attention_self_query_bias'
+	linear_18
+
+
+€
+€*
+name
+
+
"
+	linear_18ß
+linear
+x
+
+
+input_75=
+weight3
+1
+/model_encoder_layer_3_attention_self_key_weight9
+bias1
+/
+-model_encoder_layer_3_attention_self_key_bias'
+	linear_19
+
+
+€
+€*
+name
+
+
"
+	linear_19b
+const
+var_337
+
+
+*
+name
+
+
+"
+op_337*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_19
+shape
+	
+var_337'
+x_39
+
+
+€
+
+ *
+name
+
+
+"
+x_39ã
+linear
+x
+
+
+input_75?
+weight5
+3
+1model_encoder_layer_3_attention_self_value_weight;
+bias3
+1
+/model_encoder_layer_3_attention_self_value_bias'
+	linear_20
+
+
+€
+€*
+name
+
+
"
+	linear_20b
+const
+var_346
+
+
+*
+name
+
+
+"
+op_346*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_20
+shape
+	
+var_346'
+x_43
+
+
+€
+
+ *
+name
+
+
+"
+x_43a
+const
+var_348
+
+
+*
+name
+
+
+"
+op_348*!
+val
+
+
+
+
+
+�b
+const
+var_352
+
+
+*
+name
+
+
+"
+op_352*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_18
+shape
+	
+var_352'
+x_47
+
+
+€
+
+ *
+name
+
+
+"
+x_47ƒ
+const)
+!attention_scores_13_transpose_x_0
+*7
+name/
+'
+%"#
+!attention_scores_13_transpose_x_0*
+val
+
+
+�ƒ
+const)
+!attention_scores_13_transpose_y_0
+*7
+name/
+'
+%"#
+!attention_scores_13_transpose_y_0*
+val
+
+
+�z
+const#
+transpose_42_perm_0
+
+
+*)
+name!
+
+"
+transpose_42_perm_0*!
+val
+
+
+
+
+
+�z
+const#
+transpose_43_perm_0
+
+
+*)
+name!
+
+"
+transpose_43_perm_0*!
+val
+
+
+
+
+
+�
+	transpose
+x
+
+x_39
+perm
+
+transpose_43_perm_0/
+transpose_93
+
+
+
+ 
+€*"
+name
+
+"
+transpose_93
+	transpose
+x
+
+x_47
+perm
+
+transpose_42_perm_0/
+transpose_94
+
+
+
+€
+ *"
+name
+
+"
+transpose_94†
+matmul
+x
+
+transpose_94
+y
+
+transpose_934
+transpose_x%
+#
+!attention_scores_13_transpose_x_04
+transpose_y%
+#
+!attention_scores_13_transpose_y_07
+attention_scores_13 
+
+
+
+€
+€*)
+name!
+
+"
+attention_scores_13†
+const)
+!_inversed_attention_scores_15_y_0
+*7
+name/
+'
+%"#
+!_inversed_attention_scores_15_y_0*
+val
+
+
+
+
+ó5>Ç
+mul
+x
+
+attention_scores_13*
+y%
+#
+!_inversed_attention_scores_15_y_0A
+_inversed_attention_scores_15 
+
+
+
+€
+€*3
+name+
+#
+!"
+_inversed_attention_scores_15–
+add&
+x!
+
+_inversed_attention_scores_15
+y
+
+attention_mask_1,
+input_77 
+
+
+
+€
+€*
+name
+
+"
+
+input_77}
+softmax
+x
+
+
+input_77
+axis	
+
+var_8,
+input_79 
+
+
+
+€
+€*
+name
+
+"
+
+input_79}
+const&
+context_layer_13_transpose_x_0
+*4
+name,
+$
+"" 
+context_layer_13_transpose_x_0*
+val
+
+
+�}
+const&
+context_layer_13_transpose_y_0
+*4
+name,
+$
+"" 
+context_layer_13_transpose_y_0*
+val
+
+
+�„
+	transpose
+x
+
+x_43
+perm
+	
+var_348/
+transpose_95
+
+
+
+€
+ *"
+name
+
+"
+transpose_95õ
+matmul
+x
+
+
+input_79
+y
+
+transpose_951
+transpose_x"
+ 
+context_layer_13_transpose_x_01
+transpose_y"
+ 
+context_layer_13_transpose_y_03
+context_layer_13
+
+
+
+€
+ *&
+name
+
+"
+context_layer_13a
+const
+var_364
+
+
+*
+name
+
+
+"
+op_364*!
+val
+
+
+
+
+
+�b
+const
+var_369
+
+
+*
+name
+
+
+"
+op_369*"
+val
+
+
+
+	
+€€
+	transpose
+x
+
+context_layer_13
+perm
+	
+var_364/
+transpose_92
+
+
+€
+
+ *"
+name
+
+"
+transpose_92~
+reshape
+x
+
+transpose_92
+shape
+	
+var_369&
+input_81
+
+
+€
+€*
+name
+
+"
+
+input_81ç
+linear
+x
+
+
+input_81A
+weight7
+5
+3model_encoder_layer_3_attention_output_dense_weight=
+bias5
+3
+1model_encoder_layer_3_attention_output_dense_bias'
+	linear_21
+
+
+€
+€*
+name
+
+
"
+	linear_21t
+add
+x
+
+	linear_21
+y
+
+
+input_75&
+input_85
+
+
+€
+€*
+name
+
+"
+
+input_85x
+const
+input_87_axes_0
+
+
+*%
+name
+
+"
+input_87_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ¤
+
+layer_norm
+x
+
+
+input_85
+axes
+
+input_87_axes_0D
+gamma;
+9
+7model_encoder_layer_3_attention_output_LayerNorm_weightA
+beta9
+7
+5model_encoder_layer_3_attention_output_LayerNorm_bias
+epsilon
+
+
+var_10&
+input_87
+
+
+€
+€*
+name
+
+"
+
+input_87ß
+linear
+x
+
+
+input_87=
+weight3
+1
+/model_encoder_layer_3_intermediate_dense_weight9
+bias1
+/
+-model_encoder_layer_3_intermediate_dense_bias'
+	linear_22
+
+
+€
+€*
+name
+
+
"
+	linear_22c
+const
+input_91_mode_0
+*%
+name
+
+"
+input_91_mode_0*
+val
+
+	"
+EXACT
+gelu
+x
+
+	linear_22
+mode
+
+input_91_mode_0&
+input_91
+
+
+€
+€*
+name
+
+"
+
+input_91Ó
+linear
+x
+
+
+input_917
+weight-
++
+)model_encoder_layer_3_output_dense_weight3
+bias+
+)
+'model_encoder_layer_3_output_dense_bias'
+	linear_23
+
+
+€
+€*
+name
+
+
"
+	linear_23t
+add
+x
+
+	linear_23
+y
+
+
+input_87&
+input_95
+
+
+€
+€*
+name
+
+"
+
+input_95x
+const
+input_97_axes_0
+
+
+*%
+name
+
+"
+input_97_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ
+
+layer_norm
+x
+
+
+input_95
+axes
+
+input_97_axes_0:
+gamma1
+/
+-model_encoder_layer_3_output_LayerNorm_weight7
+beta/
+-
++model_encoder_layer_3_output_LayerNorm_bias
+epsilon
+
+
+var_10&
+input_97
+
+
+€
+€*
+name
+
+"
+
+input_97ã
+linear
+x
+
+
+input_97?
+weight5
+3
+1model_encoder_layer_4_attention_self_query_weight;
+bias3
+1
+/model_encoder_layer_4_attention_self_query_bias'
+	linear_24
+
+
+€
+€*
+name
+
+
"
+	linear_24ß
+linear
+x
+
+
+input_97=
+weight3
+1
+/model_encoder_layer_4_attention_self_key_weight9
+bias1
+/
+-model_encoder_layer_4_attention_self_key_bias'
+	linear_25
+
+
+€
+€*
+name
+
+
"
+	linear_25b
+const
+var_414
+
+
+*
+name
+
+
+"
+op_414*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_25
+shape
+	
+var_414'
+x_51
+
+
+€
+
+ *
+name
+
+
+"
+x_51ã
+linear
+x
+
+
+input_97?
+weight5
+3
+1model_encoder_layer_4_attention_self_value_weight;
+bias3
+1
+/model_encoder_layer_4_attention_self_value_bias'
+	linear_26
+
+
+€
+€*
+name
+
+
"
+	linear_26b
+const
+var_423
+
+
+*
+name
+
+
+"
+op_423*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_26
+shape
+	
+var_423'
+x_55
+
+
+€
+
+ *
+name
+
+
+"
+x_55a
+const
+var_425
+
+
+*
+name
+
+
+"
+op_425*!
+val
+
+
+
+
+
+�b
+const
+var_429
+
+
+*
+name
+
+
+"
+op_429*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_24
+shape
+	
+var_429'
+x_59
+
+
+€
+
+ *
+name
+
+
+"
+x_59ƒ
+const)
+!attention_scores_17_transpose_x_0
+*7
+name/
+'
+%"#
+!attention_scores_17_transpose_x_0*
+val
+
+
+�ƒ
+const)
+!attention_scores_17_transpose_y_0
+*7
+name/
+'
+%"#
+!attention_scores_17_transpose_y_0*
+val
+
+
+�z
+const#
+transpose_44_perm_0
+
+
+*)
+name!
+
+"
+transpose_44_perm_0*!
+val
+
+
+
+
+
+�z
+const#
+transpose_45_perm_0
+
+
+*)
+name!
+
+"
+transpose_45_perm_0*!
+val
+
+
+
+
+
+�
+	transpose
+x
+
+x_51
+perm
+
+transpose_45_perm_0/
+transpose_89
+
+
+
+ 
+€*"
+name
+
+"
+transpose_89
+	transpose
+x
+
+x_59
+perm
+
+transpose_44_perm_0/
+transpose_90
+
+
+
+€
+ *"
+name
+
+"
+transpose_90†
+matmul
+x
+
+transpose_90
+y
+
+transpose_894
+transpose_x%
+#
+!attention_scores_17_transpose_x_04
+transpose_y%
+#
+!attention_scores_17_transpose_y_07
+attention_scores_17 
+
+
+
+€
+€*)
+name!
+
+"
+attention_scores_17†
+const)
+!_inversed_attention_scores_19_y_0
+*7
+name/
+'
+%"#
+!_inversed_attention_scores_19_y_0*
+val
+
+
+
+
+ó5>Ç
+mul
+x
+
+attention_scores_17*
+y%
+#
+!_inversed_attention_scores_19_y_0A
+_inversed_attention_scores_19 
+
+
+
+€
+€*3
+name+
+#
+!"
+_inversed_attention_scores_19–
+add&
+x!
+
+_inversed_attention_scores_19
+y
+
+attention_mask_1,
+input_99 
+
+
+
+€
+€*
+name
+
+"
+
+input_99
+softmax
+x
+
+
+input_99
+axis	
+
+var_8-
+	input_101 
+
+
+
+€
+€*
+name
+
+
"
+	input_101}
+const&
+context_layer_17_transpose_x_0
+*4
+name,
+$
+"" 
+context_layer_17_transpose_x_0*
+val
+
+
+�}
+const&
+context_layer_17_transpose_y_0
+*4
+name,
+$
+"" 
+context_layer_17_transpose_y_0*
+val
+
+
+�„
+	transpose
+x
+
+x_55
+perm
+	
+var_425/
+transpose_91
+
+
+
+€
+ *"
+name
+
+"
+transpose_91ö
+matmul
+x
+
+	input_101
+y
+
+transpose_911
+transpose_x"
+ 
+context_layer_17_transpose_x_01
+transpose_y"
+ 
+context_layer_17_transpose_y_03
+context_layer_17
+
+
+
+€
+ *&
+name
+
+"
+context_layer_17a
+const
+var_441
+
+
+*
+name
+
+
+"
+op_441*!
+val
+
+
+
+
+
+�b
+const
+var_446
+
+
+*
+name
+
+
+"
+op_446*"
+val
+
+
+
+	
+€€
+	transpose
+x
+
+context_layer_17
+perm
+	
+var_441/
+transpose_88
+
+
+€
+
+ *"
+name
+
+"
+transpose_88€
+reshape
+x
+
+transpose_88
+shape
+	
+var_446'
+	input_103
+
+
+€
+€*
+name
+
+
"
+	input_103è
+linear
+x
+
+	input_103A
+weight7
+5
+3model_encoder_layer_4_attention_output_dense_weight=
+bias5
+3
+1model_encoder_layer_4_attention_output_dense_bias'
+	linear_27
+
+
+€
+€*
+name
+
+
"
+	linear_27v
+add
+x
+
+	linear_27
+y
+
+
+input_97'
+	input_107
+
+
+€
+€*
+name
+
+
"
+	input_107z
+const 
+input_109_axes_0
+
+
+*&
+name
+
+"
+input_109_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ¨
+
+layer_norm
+x
+
+	input_107
+axes
+
+input_109_axes_0D
+gamma;
+9
+7model_encoder_layer_4_attention_output_LayerNorm_weightA
+beta9
+7
+5model_encoder_layer_4_attention_output_LayerNorm_bias
+epsilon
+
+
+var_10'
+	input_109
+
+
+€
+€*
+name
+
+
"
+	input_109à
+linear
+x
+
+	input_109=
+weight3
+1
+/model_encoder_layer_4_intermediate_dense_weight9
+bias1
+/
+-model_encoder_layer_4_intermediate_dense_bias'
+	linear_28
+
+
+€
+€*
+name
+
+
"
+	linear_28e
+const
+input_113_mode_0
+*&
+name
+
+"
+input_113_mode_0*
+val
+
+	"
+EXACT‚
+gelu
+x
+
+	linear_28
+mode
+
+input_113_mode_0'
+	input_113
+
+
+€
+€*
+name
+
+
"
+	input_113Ô
+linear
+x
+
+	input_1137
+weight-
++
+)model_encoder_layer_4_output_dense_weight3
+bias+
+)
+'model_encoder_layer_4_output_dense_bias'
+	linear_29
+
+
+€
+€*
+name
+
+
"
+	linear_29w
+add
+x
+
+	linear_29
+y
+
+	input_109'
+	input_117
+
+
+€
+€*
+name
+
+
"
+	input_117z
+const 
+input_119_axes_0
+
+
+*&
+name
+
+"
+input_119_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ”
+
+layer_norm
+x
+
+	input_117
+axes
+
+input_119_axes_0:
+gamma1
+/
+-model_encoder_layer_4_output_LayerNorm_weight7
+beta/
+-
++model_encoder_layer_4_output_LayerNorm_bias
+epsilon
+
+
+var_10'
+	input_119
+
+
+€
+€*
+name
+
+
"
+	input_119ä
+linear
+x
+
+	input_119?
+weight5
+3
+1model_encoder_layer_5_attention_self_query_weight;
+bias3
+1
+/model_encoder_layer_5_attention_self_query_bias'
+	linear_30
+
+
+€
+€*
+name
+
+
"
+	linear_30à
+linear
+x
+
+	input_119=
+weight3
+1
+/model_encoder_layer_5_attention_self_key_weight9
+bias1
+/
+-model_encoder_layer_5_attention_self_key_bias'
+	linear_31
+
+
+€
+€*
+name
+
+
"
+	linear_31b
+const
+var_491
+
+
+*
+name
+
+
+"
+op_491*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_31
+shape
+	
+var_491'
+x_63
+
+
+€
+
+ *
+name
+
+
+"
+x_63ä
+linear
+x
+
+	input_119?
+weight5
+3
+1model_encoder_layer_5_attention_self_value_weight;
+bias3
+1
+/model_encoder_layer_5_attention_self_value_bias'
+	linear_32
+
+
+€
+€*
+name
+
+
"
+	linear_32b
+const
+var_500
+
+
+*
+name
+
+
+"
+op_500*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_32
+shape
+	
+var_500'
+x_67
+
+
+€
+
+ *
+name
+
+
+"
+x_67a
+const
+var_502
+
+
+*
+name
+
+
+"
+op_502*!
+val
+
+
+
+
+
+�b
+const
+var_506
+
+
+*
+name
+
+
+"
+op_506*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_30
+shape
+	
+var_506'
+x_71
+
+
+€
+
+ *
+name
+
+
+"
+x_71ƒ
+const)
+!attention_scores_21_transpose_x_0
+*7
+name/
+'
+%"#
+!attention_scores_21_transpose_x_0*
+val
+
+
+�ƒ
+const)
+!attention_scores_21_transpose_y_0
+*7
+name/
+'
+%"#
+!attention_scores_21_transpose_y_0*
+val
+
+
+�z
+const#
+transpose_46_perm_0
+
+
+*)
+name!
+
+"
+transpose_46_perm_0*!
+val
+
+
+
+
+
+�z
+const#
+transpose_47_perm_0
+
+
+*)
+name!
+
+"
+transpose_47_perm_0*!
+val
+
+
+
+
+
+�
+	transpose
+x
+
+x_63
+perm
+
+transpose_47_perm_0/
+transpose_85
+
+
+
+ 
+€*"
+name
+
+"
+transpose_85
+	transpose
+x
+
+x_71
+perm
+
+transpose_46_perm_0/
+transpose_86
+
+
+
+€
+ *"
+name
+
+"
+transpose_86†
+matmul
+x
+
+transpose_86
+y
+
+transpose_854
+transpose_x%
+#
+!attention_scores_21_transpose_x_04
+transpose_y%
+#
+!attention_scores_21_transpose_y_07
+attention_scores_21 
+
+
+
+€
+€*)
+name!
+
+"
+attention_scores_21†
+const)
+!_inversed_attention_scores_23_y_0
+*7
+name/
+'
+%"#
+!_inversed_attention_scores_23_y_0*
+val
+
+
+
+
+ó5>Ç
+mul
+x
+
+attention_scores_21*
+y%
+#
+!_inversed_attention_scores_23_y_0A
+_inversed_attention_scores_23 
+
+
+
+€
+€*3
+name+
+#
+!"
+_inversed_attention_scores_23˜
+add&
+x!
+
+_inversed_attention_scores_23
+y
+
+attention_mask_1-
+	input_121 
+
+
+
+€
+€*
+name
+
+
"
+	input_121€
+softmax
+x
+
+	input_121
+axis	
+
+var_8-
+	input_123 
+
+
+
+€
+€*
+name
+
+
"
+	input_123}
+const&
+context_layer_21_transpose_x_0
+*4
+name,
+$
+"" 
+context_layer_21_transpose_x_0*
+val
+
+
+�}
+const&
+context_layer_21_transpose_y_0
+*4
+name,
+$
+"" 
+context_layer_21_transpose_y_0*
+val
+
+
+�„
+	transpose
+x
+
+x_67
+perm
+	
+var_502/
+transpose_87
+
+
+
+€
+ *"
+name
+
+"
+transpose_87ö
+matmul
+x
+
+	input_123
+y
+
+transpose_871
+transpose_x"
+ 
+context_layer_21_transpose_x_01
+transpose_y"
+ 
+context_layer_21_transpose_y_03
+context_layer_21
+
+
+
+€
+ *&
+name
+
+"
+context_layer_21a
+const
+var_518
+
+
+*
+name
+
+
+"
+op_518*!
+val
+
+
+
+
+
+�b
+const
+var_523
+
+
+*
+name
+
+
+"
+op_523*"
+val
+
+
+
+	
+€€
+	transpose
+x
+
+context_layer_21
+perm
+	
+var_518/
+transpose_84
+
+
+€
+
+ *"
+name
+
+"
+transpose_84€
+reshape
+x
+
+transpose_84
+shape
+	
+var_523'
+	input_125
+
+
+€
+€*
+name
+
+
"
+	input_125è
+linear
+x
+
+	input_125A
+weight7
+5
+3model_encoder_layer_5_attention_output_dense_weight=
+bias5
+3
+1model_encoder_layer_5_attention_output_dense_bias'
+	linear_33
+
+
+€
+€*
+name
+
+
"
+	linear_33w
+add
+x
+
+	linear_33
+y
+
+	input_119'
+	input_129
+
+
+€
+€*
+name
+
+
"
+	input_129z
+const 
+input_131_axes_0
+
+
+*&
+name
+
+"
+input_131_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ¨
+
+layer_norm
+x
+
+	input_129
+axes
+
+input_131_axes_0D
+gamma;
+9
+7model_encoder_layer_5_attention_output_LayerNorm_weightA
+beta9
+7
+5model_encoder_layer_5_attention_output_LayerNorm_bias
+epsilon
+
+
+var_10'
+	input_131
+
+
+€
+€*
+name
+
+
"
+	input_131à
+linear
+x
+
+	input_131=
+weight3
+1
+/model_encoder_layer_5_intermediate_dense_weight9
+bias1
+/
+-model_encoder_layer_5_intermediate_dense_bias'
+	linear_34
+
+
+€
+€*
+name
+
+
"
+	linear_34e
+const
+input_135_mode_0
+*&
+name
+
+"
+input_135_mode_0*
+val
+
+	"
+EXACT‚
+gelu
+x
+
+	linear_34
+mode
+
+input_135_mode_0'
+	input_135
+
+
+€
+€*
+name
+
+
"
+	input_135Ô
+linear
+x
+
+	input_1357
+weight-
++
+)model_encoder_layer_5_output_dense_weight3
+bias+
+)
+'model_encoder_layer_5_output_dense_bias'
+	linear_35
+
+
+€
+€*
+name
+
+
"
+	linear_35w
+add
+x
+
+	linear_35
+y
+
+	input_131'
+	input_139
+
+
+€
+€*
+name
+
+
"
+	input_139z
+const 
+input_141_axes_0
+
+
+*&
+name
+
+"
+input_141_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ”
+
+layer_norm
+x
+
+	input_139
+axes
+
+input_141_axes_0:
+gamma1
+/
+-model_encoder_layer_5_output_LayerNorm_weight7
+beta/
+-
++model_encoder_layer_5_output_LayerNorm_bias
+epsilon
+
+
+var_10'
+	input_141
+
+
+€
+€*
+name
+
+
"
+	input_141ä
+linear
+x
+
+	input_141?
+weight5
+3
+1model_encoder_layer_6_attention_self_query_weight;
+bias3
+1
+/model_encoder_layer_6_attention_self_query_bias'
+	linear_36
+
+
+€
+€*
+name
+
+
"
+	linear_36à
+linear
+x
+
+	input_141=
+weight3
+1
+/model_encoder_layer_6_attention_self_key_weight9
+bias1
+/
+-model_encoder_layer_6_attention_self_key_bias'
+	linear_37
+
+
+€
+€*
+name
+
+
"
+	linear_37b
+const
+var_568
+
+
+*
+name
+
+
+"
+op_568*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_37
+shape
+	
+var_568'
+x_75
+
+
+€
+
+ *
+name
+
+
+"
+x_75ä
+linear
+x
+
+	input_141?
+weight5
+3
+1model_encoder_layer_6_attention_self_value_weight;
+bias3
+1
+/model_encoder_layer_6_attention_self_value_bias'
+	linear_38
+
+
+€
+€*
+name
+
+
"
+	linear_38b
+const
+var_577
+
+
+*
+name
+
+
+"
+op_577*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_38
+shape
+	
+var_577'
+x_79
+
+
+€
+
+ *
+name
+
+
+"
+x_79a
+const
+var_579
+
+
+*
+name
+
+
+"
+op_579*!
+val
+
+
+
+
+
+�b
+const
+var_583
+
+
+*
+name
+
+
+"
+op_583*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_36
+shape
+	
+var_583'
+x_83
+
+
+€
+
+ *
+name
+
+
+"
+x_83ƒ
+const)
+!attention_scores_25_transpose_x_0
+*7
+name/
+'
+%"#
+!attention_scores_25_transpose_x_0*
+val
+
+
+�ƒ
+const)
+!attention_scores_25_transpose_y_0
+*7
+name/
+'
+%"#
+!attention_scores_25_transpose_y_0*
+val
+
+
+�z
+const#
+transpose_48_perm_0
+
+
+*)
+name!
+
+"
+transpose_48_perm_0*!
+val
+
+
+
+
+
+�z
+const#
+transpose_49_perm_0
+
+
+*)
+name!
+
+"
+transpose_49_perm_0*!
+val
+
+
+
+
+
+�
+	transpose
+x
+
+x_75
+perm
+
+transpose_49_perm_0/
+transpose_81
+
+
+
+ 
+€*"
+name
+
+"
+transpose_81
+	transpose
+x
+
+x_83
+perm
+
+transpose_48_perm_0/
+transpose_82
+
+
+
+€
+ *"
+name
+
+"
+transpose_82†
+matmul
+x
+
+transpose_82
+y
+
+transpose_814
+transpose_x%
+#
+!attention_scores_25_transpose_x_04
+transpose_y%
+#
+!attention_scores_25_transpose_y_07
+attention_scores_25 
+
+
+
+€
+€*)
+name!
+
+"
+attention_scores_25†
+const)
+!_inversed_attention_scores_27_y_0
+*7
+name/
+'
+%"#
+!_inversed_attention_scores_27_y_0*
+val
+
+
+
+
+ó5>Ç
+mul
+x
+
+attention_scores_25*
+y%
+#
+!_inversed_attention_scores_27_y_0A
+_inversed_attention_scores_27 
+
+
+
+€
+€*3
+name+
+#
+!"
+_inversed_attention_scores_27˜
+add&
+x!
+
+_inversed_attention_scores_27
+y
+
+attention_mask_1-
+	input_143 
+
+
+
+€
+€*
+name
+
+
"
+	input_143€
+softmax
+x
+
+	input_143
+axis	
+
+var_8-
+	input_145 
+
+
+
+€
+€*
+name
+
+
"
+	input_145}
+const&
+context_layer_25_transpose_x_0
+*4
+name,
+$
+"" 
+context_layer_25_transpose_x_0*
+val
+
+
+�}
+const&
+context_layer_25_transpose_y_0
+*4
+name,
+$
+"" 
+context_layer_25_transpose_y_0*
+val
+
+
+�„
+	transpose
+x
+
+x_79
+perm
+	
+var_579/
+transpose_83
+
+
+
+€
+ *"
+name
+
+"
+transpose_83ö
+matmul
+x
+
+	input_145
+y
+
+transpose_831
+transpose_x"
+ 
+context_layer_25_transpose_x_01
+transpose_y"
+ 
+context_layer_25_transpose_y_03
+context_layer_25
+
+
+
+€
+ *&
+name
+
+"
+context_layer_25a
+const
+var_595
+
+
+*
+name
+
+
+"
+op_595*!
+val
+
+
+
+
+
+�b
+const
+var_600
+
+
+*
+name
+
+
+"
+op_600*"
+val
+
+
+
+	
+€€
+	transpose
+x
+
+context_layer_25
+perm
+	
+var_595/
+transpose_80
+
+
+€
+
+ *"
+name
+
+"
+transpose_80€
+reshape
+x
+
+transpose_80
+shape
+	
+var_600'
+	input_147
+
+
+€
+€*
+name
+
+
"
+	input_147è
+linear
+x
+
+	input_147A
+weight7
+5
+3model_encoder_layer_6_attention_output_dense_weight=
+bias5
+3
+1model_encoder_layer_6_attention_output_dense_bias'
+	linear_39
+
+
+€
+€*
+name
+
+
"
+	linear_39w
+add
+x
+
+	linear_39
+y
+
+	input_141'
+	input_151
+
+
+€
+€*
+name
+
+
"
+	input_151z
+const 
+input_153_axes_0
+
+
+*&
+name
+
+"
+input_153_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ¨
+
+layer_norm
+x
+
+	input_151
+axes
+
+input_153_axes_0D
+gamma;
+9
+7model_encoder_layer_6_attention_output_LayerNorm_weightA
+beta9
+7
+5model_encoder_layer_6_attention_output_LayerNorm_bias
+epsilon
+
+
+var_10'
+	input_153
+
+
+€
+€*
+name
+
+
"
+	input_153à
+linear
+x
+
+	input_153=
+weight3
+1
+/model_encoder_layer_6_intermediate_dense_weight9
+bias1
+/
+-model_encoder_layer_6_intermediate_dense_bias'
+	linear_40
+
+
+€
+€*
+name
+
+
"
+	linear_40e
+const
+input_157_mode_0
+*&
+name
+
+"
+input_157_mode_0*
+val
+
+	"
+EXACT‚
+gelu
+x
+
+	linear_40
+mode
+
+input_157_mode_0'
+	input_157
+
+
+€
+€*
+name
+
+
"
+	input_157Ô
+linear
+x
+
+	input_1577
+weight-
++
+)model_encoder_layer_6_output_dense_weight3
+bias+
+)
+'model_encoder_layer_6_output_dense_bias'
+	linear_41
+
+
+€
+€*
+name
+
+
"
+	linear_41w
+add
+x
+
+	linear_41
+y
+
+	input_153'
+	input_161
+
+
+€
+€*
+name
+
+
"
+	input_161z
+const 
+input_163_axes_0
+
+
+*&
+name
+
+"
+input_163_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ”
+
+layer_norm
+x
+
+	input_161
+axes
+
+input_163_axes_0:
+gamma1
+/
+-model_encoder_layer_6_output_LayerNorm_weight7
+beta/
+-
++model_encoder_layer_6_output_LayerNorm_bias
+epsilon
+
+
+var_10'
+	input_163
+
+
+€
+€*
+name
+
+
"
+	input_163ä
+linear
+x
+
+	input_163?
+weight5
+3
+1model_encoder_layer_7_attention_self_query_weight;
+bias3
+1
+/model_encoder_layer_7_attention_self_query_bias'
+	linear_42
+
+
+€
+€*
+name
+
+
"
+	linear_42à
+linear
+x
+
+	input_163=
+weight3
+1
+/model_encoder_layer_7_attention_self_key_weight9
+bias1
+/
+-model_encoder_layer_7_attention_self_key_bias'
+	linear_43
+
+
+€
+€*
+name
+
+
"
+	linear_43b
+const
+var_645
+
+
+*
+name
+
+
+"
+op_645*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_43
+shape
+	
+var_645'
+x_87
+
+
+€
+
+ *
+name
+
+
+"
+x_87ä
+linear
+x
+
+	input_163?
+weight5
+3
+1model_encoder_layer_7_attention_self_value_weight;
+bias3
+1
+/model_encoder_layer_7_attention_self_value_bias'
+	linear_44
+
+
+€
+€*
+name
+
+
"
+	linear_44b
+const
+var_654
+
+
+*
+name
+
+
+"
+op_654*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_44
+shape
+	
+var_654'
+x_91
+
+
+€
+
+ *
+name
+
+
+"
+x_91a
+const
+var_656
+
+
+*
+name
+
+
+"
+op_656*!
+val
+
+
+
+
+
+�b
+const
+var_660
+
+
+*
+name
+
+
+"
+op_660*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_42
+shape
+	
+var_660'
+x_95
+
+
+€
+
+ *
+name
+
+
+"
+x_95ƒ
+const)
+!attention_scores_29_transpose_x_0
+*7
+name/
+'
+%"#
+!attention_scores_29_transpose_x_0*
+val
+
+
+�ƒ
+const)
+!attention_scores_29_transpose_y_0
+*7
+name/
+'
+%"#
+!attention_scores_29_transpose_y_0*
+val
+
+
+�z
+const#
+transpose_50_perm_0
+
+
+*)
+name!
+
+"
+transpose_50_perm_0*!
+val
+
+
+
+
+
+�z
+const#
+transpose_51_perm_0
+
+
+*)
+name!
+
+"
+transpose_51_perm_0*!
+val
+
+
+
+
+
+�
+	transpose
+x
+
+x_87
+perm
+
+transpose_51_perm_0/
+transpose_77
+
+
+
+ 
+€*"
+name
+
+"
+transpose_77
+	transpose
+x
+
+x_95
+perm
+
+transpose_50_perm_0/
+transpose_78
+
+
+
+€
+ *"
+name
+
+"
+transpose_78†
+matmul
+x
+
+transpose_78
+y
+
+transpose_774
+transpose_x%
+#
+!attention_scores_29_transpose_x_04
+transpose_y%
+#
+!attention_scores_29_transpose_y_07
+attention_scores_29 
+
+
+
+€
+€*)
+name!
+
+"
+attention_scores_29†
+const)
+!_inversed_attention_scores_31_y_0
+*7
+name/
+'
+%"#
+!_inversed_attention_scores_31_y_0*
+val
+
+
+
+
+ó5>Ç
+mul
+x
+
+attention_scores_29*
+y%
+#
+!_inversed_attention_scores_31_y_0A
+_inversed_attention_scores_31 
+
+
+
+€
+€*3
+name+
+#
+!"
+_inversed_attention_scores_31˜
+add&
+x!
+
+_inversed_attention_scores_31
+y
+
+attention_mask_1-
+	input_165 
+
+
+
+€
+€*
+name
+
+
"
+	input_165€
+softmax
+x
+
+	input_165
+axis	
+
+var_8-
+	input_167 
+
+
+
+€
+€*
+name
+
+
"
+	input_167}
+const&
+context_layer_29_transpose_x_0
+*4
+name,
+$
+"" 
+context_layer_29_transpose_x_0*
+val
+
+
+�}
+const&
+context_layer_29_transpose_y_0
+*4
+name,
+$
+"" 
+context_layer_29_transpose_y_0*
+val
+
+
+�„
+	transpose
+x
+
+x_91
+perm
+	
+var_656/
+transpose_79
+
+
+
+€
+ *"
+name
+
+"
+transpose_79ö
+matmul
+x
+
+	input_167
+y
+
+transpose_791
+transpose_x"
+ 
+context_layer_29_transpose_x_01
+transpose_y"
+ 
+context_layer_29_transpose_y_03
+context_layer_29
+
+
+
+€
+ *&
+name
+
+"
+context_layer_29a
+const
+var_672
+
+
+*
+name
+
+
+"
+op_672*!
+val
+
+
+
+
+
+�b
+const
+var_677
+
+
+*
+name
+
+
+"
+op_677*"
+val
+
+
+
+	
+€€
+	transpose
+x
+
+context_layer_29
+perm
+	
+var_672/
+transpose_76
+
+
+€
+
+ *"
+name
+
+"
+transpose_76€
+reshape
+x
+
+transpose_76
+shape
+	
+var_677'
+	input_169
+
+
+€
+€*
+name
+
+
"
+	input_169è
+linear
+x
+
+	input_169A
+weight7
+5
+3model_encoder_layer_7_attention_output_dense_weight=
+bias5
+3
+1model_encoder_layer_7_attention_output_dense_bias'
+	linear_45
+
+
+€
+€*
+name
+
+
"
+	linear_45w
+add
+x
+
+	linear_45
+y
+
+	input_163'
+	input_173
+
+
+€
+€*
+name
+
+
"
+	input_173z
+const 
+input_175_axes_0
+
+
+*&
+name
+
+"
+input_175_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ¨
+
+layer_norm
+x
+
+	input_173
+axes
+
+input_175_axes_0D
+gamma;
+9
+7model_encoder_layer_7_attention_output_LayerNorm_weightA
+beta9
+7
+5model_encoder_layer_7_attention_output_LayerNorm_bias
+epsilon
+
+
+var_10'
+	input_175
+
+
+€
+€*
+name
+
+
"
+	input_175à
+linear
+x
+
+	input_175=
+weight3
+1
+/model_encoder_layer_7_intermediate_dense_weight9
+bias1
+/
+-model_encoder_layer_7_intermediate_dense_bias'
+	linear_46
+
+
+€
+€*
+name
+
+
"
+	linear_46e
+const
+input_179_mode_0
+*&
+name
+
+"
+input_179_mode_0*
+val
+
+	"
+EXACT‚
+gelu
+x
+
+	linear_46
+mode
+
+input_179_mode_0'
+	input_179
+
+
+€
+€*
+name
+
+
"
+	input_179Ô
+linear
+x
+
+	input_1797
+weight-
++
+)model_encoder_layer_7_output_dense_weight3
+bias+
+)
+'model_encoder_layer_7_output_dense_bias'
+	linear_47
+
+
+€
+€*
+name
+
+
"
+	linear_47w
+add
+x
+
+	linear_47
+y
+
+	input_175'
+	input_183
+
+
+€
+€*
+name
+
+
"
+	input_183z
+const 
+input_185_axes_0
+
+
+*&
+name
+
+"
+input_185_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ”
+
+layer_norm
+x
+
+	input_183
+axes
+
+input_185_axes_0:
+gamma1
+/
+-model_encoder_layer_7_output_LayerNorm_weight7
+beta/
+-
++model_encoder_layer_7_output_LayerNorm_bias
+epsilon
+
+
+var_10'
+	input_185
+
+
+€
+€*
+name
+
+
"
+	input_185ä
+linear
+x
+
+	input_185?
+weight5
+3
+1model_encoder_layer_8_attention_self_query_weight;
+bias3
+1
+/model_encoder_layer_8_attention_self_query_bias'
+	linear_48
+
+
+€
+€*
+name
+
+
"
+	linear_48à
+linear
+x
+
+	input_185=
+weight3
+1
+/model_encoder_layer_8_attention_self_key_weight9
+bias1
+/
+-model_encoder_layer_8_attention_self_key_bias'
+	linear_49
+
+
+€
+€*
+name
+
+
"
+	linear_49b
+const
+var_722
+
+
+*
+name
+
+
+"
+op_722*"
+val
+
+
+
+	
+€ x
+reshape
+x
+
+	linear_49
+shape
+	
+var_722'
+x_99
+
+
+€
+
+ *
+name
+
+
+"
+x_99ä
+linear
+x
+
+	input_185?
+weight5
+3
+1model_encoder_layer_8_attention_self_value_weight;
+bias3
+1
+/model_encoder_layer_8_attention_self_value_bias'
+	linear_50
+
+
+€
+€*
+name
+
+
"
+	linear_50b
+const
+var_731
+
+
+*
+name
+
+
+"
+op_731*"
+val
+
+
+
+	
+€ z
+reshape
+x
+
+	linear_50
+shape
+	
+var_731(
+x_103
+
+
+€
+
+ *
+name
+
+	"
+x_103a
+const
+var_733
+
+
+*
+name
+
+
+"
+op_733*!
+val
+
+
+
+
+
+�b
+const
+var_737
+
+
+*
+name
+
+
+"
+op_737*"
+val
+
+
+
+	
+€ z
+reshape
+x
+
+	linear_48
+shape
+	
+var_737(
+x_107
+
+
+€
+
+ *
+name
+
+	"
+x_107ƒ
+const)
+!attention_scores_33_transpose_x_0
+*7
+name/
+'
+%"#
+!attention_scores_33_transpose_x_0*
+val
+
+
+�ƒ
+const)
+!attention_scores_33_transpose_y_0
+*7
+name/
+'
+%"#
+!attention_scores_33_transpose_y_0*
+val
+
+
+�z
+const#
+transpose_52_perm_0
+
+
+*)
+name!
+
+"
+transpose_52_perm_0*!
+val
+
+
+
+
+
+�z
+const#
+transpose_53_perm_0
+
+
+*)
+name!
+
+"
+transpose_53_perm_0*!
+val
+
+
+
+
+
+�
+	transpose
+x
+
+x_99
+perm
+
+transpose_53_perm_0/
+transpose_73
+
+
+
+ 
+€*"
+name
+
+"
+transpose_73‘
+	transpose
+x	
+
+x_107
+perm
+
+transpose_52_perm_0/
+transpose_74
+
+
+
+€
+ *"
+name
+
+"
+transpose_74†
+matmul
+x
+
+transpose_74
+y
+
+transpose_734
+transpose_x%
+#
+!attention_scores_33_transpose_x_04
+transpose_y%
+#
+!attention_scores_33_transpose_y_07
+attention_scores_33 
+
+
+
+€
+€*)
+name!
+
+"
+attention_scores_33†
+const)
+!_inversed_attention_scores_35_y_0
+*7
+name/
+'
+%"#
+!_inversed_attention_scores_35_y_0*
+val
+
+
+
+
+ó5>Ç
+mul
+x
+
+attention_scores_33*
+y%
+#
+!_inversed_attention_scores_35_y_0A
+_inversed_attention_scores_35 
+
+
+
+€
+€*3
+name+
+#
+!"
+_inversed_attention_scores_35˜
+add&
+x!
+
+_inversed_attention_scores_35
+y
+
+attention_mask_1-
+	input_187 
+
+
+
+€
+€*
+name
+
+
"
+	input_187€
+softmax
+x
+
+	input_187
+axis	
+
+var_8-
+	input_189 
+
+
+
+€
+€*
+name
+
+
"
+	input_189}
+const&
+context_layer_33_transpose_x_0
+*4
+name,
+$
+"" 
+context_layer_33_transpose_x_0*
+val
+
+
+�}
+const&
+context_layer_33_transpose_y_0
+*4
+name,
+$
+"" 
+context_layer_33_transpose_y_0*
+val
+
+
+�…
+	transpose
+x	
+
+x_103
+perm
+	
+var_733/
+transpose_75
+
+
+
+€
+ *"
+name
+
+"
+transpose_75ö
+matmul
+x
+
+	input_189
+y
+
+transpose_751
+transpose_x"
+ 
+context_layer_33_transpose_x_01
+transpose_y"
+ 
+context_layer_33_transpose_y_03
+context_layer_33
+
+
+
+€
+ *&
+name
+
+"
+context_layer_33a
+const
+var_749
+
+
+*
+name
+
+
+"
+op_749*!
+val
+
+
+
+
+
+�b
+const
+var_754
+
+
+*
+name
+
+
+"
+op_754*"
+val
+
+
+
+	
+€€
+	transpose
+x
+
+context_layer_33
+perm
+	
+var_749/
+transpose_72
+
+
+€
+
+ *"
+name
+
+"
+transpose_72€
+reshape
+x
+
+transpose_72
+shape
+	
+var_754'
+	input_191
+
+
+€
+€*
+name
+
+
"
+	input_191è
+linear
+x
+
+	input_191A
+weight7
+5
+3model_encoder_layer_8_attention_output_dense_weight=
+bias5
+3
+1model_encoder_layer_8_attention_output_dense_bias'
+	linear_51
+
+
+€
+€*
+name
+
+
"
+	linear_51w
+add
+x
+
+	linear_51
+y
+
+	input_185'
+	input_195
+
+
+€
+€*
+name
+
+
"
+	input_195z
+const 
+input_197_axes_0
+
+
+*&
+name
+
+"
+input_197_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ¨
+
+layer_norm
+x
+
+	input_195
+axes
+
+input_197_axes_0D
+gamma;
+9
+7model_encoder_layer_8_attention_output_LayerNorm_weightA
+beta9
+7
+5model_encoder_layer_8_attention_output_LayerNorm_bias
+epsilon
+
+
+var_10'
+	input_197
+
+
+€
+€*
+name
+
+
"
+	input_197à
+linear
+x
+
+	input_197=
+weight3
+1
+/model_encoder_layer_8_intermediate_dense_weight9
+bias1
+/
+-model_encoder_layer_8_intermediate_dense_bias'
+	linear_52
+
+
+€
+€*
+name
+
+
"
+	linear_52e
+const
+input_201_mode_0
+*&
+name
+
+"
+input_201_mode_0*
+val
+
+	"
+EXACT‚
+gelu
+x
+
+	linear_52
+mode
+
+input_201_mode_0'
+	input_201
+
+
+€
+€*
+name
+
+
"
+	input_201Ô
+linear
+x
+
+	input_2017
+weight-
++
+)model_encoder_layer_8_output_dense_weight3
+bias+
+)
+'model_encoder_layer_8_output_dense_bias'
+	linear_53
+
+
+€
+€*
+name
+
+
"
+	linear_53w
+add
+x
+
+	linear_53
+y
+
+	input_197'
+	input_205
+
+
+€
+€*
+name
+
+
"
+	input_205z
+const 
+input_207_axes_0
+
+
+*&
+name
+
+"
+input_207_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ”
+
+layer_norm
+x
+
+	input_205
+axes
+
+input_207_axes_0:
+gamma1
+/
+-model_encoder_layer_8_output_LayerNorm_weight7
+beta/
+-
++model_encoder_layer_8_output_LayerNorm_bias
+epsilon
+
+
+var_10'
+	input_207
+
+
+€
+€*
+name
+
+
"
+	input_207ä
+linear
+x
+
+	input_207?
+weight5
+3
+1model_encoder_layer_9_attention_self_query_weight;
+bias3
+1
+/model_encoder_layer_9_attention_self_query_bias'
+	linear_54
+
+
+€
+€*
+name
+
+
"
+	linear_54à
+linear
+x
+
+	input_207=
+weight3
+1
+/model_encoder_layer_9_attention_self_key_weight9
+bias1
+/
+-model_encoder_layer_9_attention_self_key_bias'
+	linear_55
+
+
+€
+€*
+name
+
+
"
+	linear_55b
+const
+var_799
+
+
+*
+name
+
+
+"
+op_799*"
+val
+
+
+
+	
+€ z
+reshape
+x
+
+	linear_55
+shape
+	
+var_799(
+x_111
+
+
+€
+
+ *
+name
+
+	"
+x_111ä
+linear
+x
+
+	input_207?
+weight5
+3
+1model_encoder_layer_9_attention_self_value_weight;
+bias3
+1
+/model_encoder_layer_9_attention_self_value_bias'
+	linear_56
+
+
+€
+€*
+name
+
+
"
+	linear_56b
+const
+var_808
+
+
+*
+name
+
+
+"
+op_808*"
+val
+
+
+
+	
+€ z
+reshape
+x
+
+	linear_56
+shape
+	
+var_808(
+x_115
+
+
+€
+
+ *
+name
+
+	"
+x_115a
+const
+var_810
+
+
+*
+name
+
+
+"
+op_810*!
+val
+
+
+
+
+
+�b
+const
+var_814
+
+
+*
+name
+
+
+"
+op_814*"
+val
+
+
+
+	
+€ z
+reshape
+x
+
+	linear_54
+shape
+	
+var_814(
+x_119
+
+
+€
+
+ *
+name
+
+	"
+x_119ƒ
+const)
+!attention_scores_37_transpose_x_0
+*7
+name/
+'
+%"#
+!attention_scores_37_transpose_x_0*
+val
+
+
+�ƒ
+const)
+!attention_scores_37_transpose_y_0
+*7
+name/
+'
+%"#
+!attention_scores_37_transpose_y_0*
+val
+
+
+�z
+const#
+transpose_54_perm_0
+
+
+*)
+name!
+
+"
+transpose_54_perm_0*!
+val
+
+
+
+
+
+�z
+const#
+transpose_55_perm_0
+
+
+*)
+name!
+
+"
+transpose_55_perm_0*!
+val
+
+
+
+
+
+�‘
+	transpose
+x	
+
+x_111
+perm
+
+transpose_55_perm_0/
+transpose_69
+
+
+
+ 
+€*"
+name
+
+"
+transpose_69‘
+	transpose
+x	
+
+x_119
+perm
+
+transpose_54_perm_0/
+transpose_70
+
+
+
+€
+ *"
+name
+
+"
+transpose_70†
+matmul
+x
+
+transpose_70
+y
+
+transpose_694
+transpose_x%
+#
+!attention_scores_37_transpose_x_04
+transpose_y%
+#
+!attention_scores_37_transpose_y_07
+attention_scores_37 
+
+
+
+€
+€*)
+name!
+
+"
+attention_scores_37†
+const)
+!_inversed_attention_scores_39_y_0
+*7
+name/
+'
+%"#
+!_inversed_attention_scores_39_y_0*
+val
+
+
+
+
+ó5>Ç
+mul
+x
+
+attention_scores_37*
+y%
+#
+!_inversed_attention_scores_39_y_0A
+_inversed_attention_scores_39 
+
+
+
+€
+€*3
+name+
+#
+!"
+_inversed_attention_scores_39˜
+add&
+x!
+
+_inversed_attention_scores_39
+y
+
+attention_mask_1-
+	input_209 
+
+
+
+€
+€*
+name
+
+
"
+	input_209€
+softmax
+x
+
+	input_209
+axis	
+
+var_8-
+	input_211 
+
+
+
+€
+€*
+name
+
+
"
+	input_211}
+const&
+context_layer_37_transpose_x_0
+*4
+name,
+$
+"" 
+context_layer_37_transpose_x_0*
+val
+
+
+�}
+const&
+context_layer_37_transpose_y_0
+*4
+name,
+$
+"" 
+context_layer_37_transpose_y_0*
+val
+
+
+�…
+	transpose
+x	
+
+x_115
+perm
+	
+var_810/
+transpose_71
+
+
+
+€
+ *"
+name
+
+"
+transpose_71ö
+matmul
+x
+
+	input_211
+y
+
+transpose_711
+transpose_x"
+ 
+context_layer_37_transpose_x_01
+transpose_y"
+ 
+context_layer_37_transpose_y_03
+context_layer_37
+
+
+
+€
+ *&
+name
+
+"
+context_layer_37a
+const
+var_826
+
+
+*
+name
+
+
+"
+op_826*!
+val
+
+
+
+
+
+�b
+const
+var_831
+
+
+*
+name
+
+
+"
+op_831*"
+val
+
+
+
+	
+€€
+	transpose
+x
+
+context_layer_37
+perm
+	
+var_826/
+transpose_68
+
+
+€
+
+ *"
+name
+
+"
+transpose_68€
+reshape
+x
+
+transpose_68
+shape
+	
+var_831'
+	input_213
+
+
+€
+€*
+name
+
+
"
+	input_213è
+linear
+x
+
+	input_213A
+weight7
+5
+3model_encoder_layer_9_attention_output_dense_weight=
+bias5
+3
+1model_encoder_layer_9_attention_output_dense_bias'
+	linear_57
+
+
+€
+€*
+name
+
+
"
+	linear_57w
+add
+x
+
+	linear_57
+y
+
+	input_207'
+	input_217
+
+
+€
+€*
+name
+
+
"
+	input_217z
+const 
+input_219_axes_0
+
+
+*&
+name
+
+"
+input_219_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ¨
+
+layer_norm
+x
+
+	input_217
+axes
+
+input_219_axes_0D
+gamma;
+9
+7model_encoder_layer_9_attention_output_LayerNorm_weightA
+beta9
+7
+5model_encoder_layer_9_attention_output_LayerNorm_bias
+epsilon
+
+
+var_10'
+	input_219
+
+
+€
+€*
+name
+
+
"
+	input_219à
+linear
+x
+
+	input_219=
+weight3
+1
+/model_encoder_layer_9_intermediate_dense_weight9
+bias1
+/
+-model_encoder_layer_9_intermediate_dense_bias'
+	linear_58
+
+
+€
+€*
+name
+
+
"
+	linear_58e
+const
+input_223_mode_0
+*&
+name
+
+"
+input_223_mode_0*
+val
+
+	"
+EXACT‚
+gelu
+x
+
+	linear_58
+mode
+
+input_223_mode_0'
+	input_223
+
+
+€
+€*
+name
+
+
"
+	input_223Ô
+linear
+x
+
+	input_2237
+weight-
++
+)model_encoder_layer_9_output_dense_weight3
+bias+
+)
+'model_encoder_layer_9_output_dense_bias'
+	linear_59
+
+
+€
+€*
+name
+
+
"
+	linear_59w
+add
+x
+
+	linear_59
+y
+
+	input_219'
+	input_227
+
+
+€
+€*
+name
+
+
"
+	input_227z
+const 
+input_229_axes_0
+
+
+*&
+name
+
+"
+input_229_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ”
+
+layer_norm
+x
+
+	input_227
+axes
+
+input_229_axes_0:
+gamma1
+/
+-model_encoder_layer_9_output_LayerNorm_weight7
+beta/
+-
++model_encoder_layer_9_output_LayerNorm_bias
+epsilon
+
+
+var_10'
+	input_229
+
+
+€
+€*
+name
+
+
"
+	input_229æ
+linear
+x
+
+	input_229@
+weight6
+4
+2model_encoder_layer_10_attention_self_query_weight<
+bias4
+2
+0model_encoder_layer_10_attention_self_query_bias'
+	linear_60
+
+
+€
+€*
+name
+
+
"
+	linear_60â
+linear
+x
+
+	input_229>
+weight4
+2
+0model_encoder_layer_10_attention_self_key_weight:
+bias2
+0
+.model_encoder_layer_10_attention_self_key_bias'
+	linear_61
+
+
+€
+€*
+name
+
+
"
+	linear_61b
+const
+var_876
+
+
+*
+name
+
+
+"
+op_876*"
+val
+
+
+
+	
+€ z
+reshape
+x
+
+	linear_61
+shape
+	
+var_876(
+x_123
+
+
+€
+
+ *
+name
+
+	"
+x_123æ
+linear
+x
+
+	input_229@
+weight6
+4
+2model_encoder_layer_10_attention_self_value_weight<
+bias4
+2
+0model_encoder_layer_10_attention_self_value_bias'
+	linear_62
+
+
+€
+€*
+name
+
+
"
+	linear_62b
+const
+var_885
+
+
+*
+name
+
+
+"
+op_885*"
+val
+
+
+
+	
+€ z
+reshape
+x
+
+	linear_62
+shape
+	
+var_885(
+x_127
+
+
+€
+
+ *
+name
+
+	"
+x_127a
+const
+var_887
+
+
+*
+name
+
+
+"
+op_887*!
+val
+
+
+
+
+
+�b
+const
+var_891
+
+
+*
+name
+
+
+"
+op_891*"
+val
+
+
+
+	
+€ z
+reshape
+x
+
+	linear_60
+shape
+	
+var_891(
+x_131
+
+
+€
+
+ *
+name
+
+	"
+x_131ƒ
+const)
+!attention_scores_41_transpose_x_0
+*7
+name/
+'
+%"#
+!attention_scores_41_transpose_x_0*
+val
+
+
+�ƒ
+const)
+!attention_scores_41_transpose_y_0
+*7
+name/
+'
+%"#
+!attention_scores_41_transpose_y_0*
+val
+
+
+�z
+const#
+transpose_56_perm_0
+
+
+*)
+name!
+
+"
+transpose_56_perm_0*!
+val
+
+
+
+
+
+�z
+const#
+transpose_57_perm_0
+
+
+*)
+name!
+
+"
+transpose_57_perm_0*!
+val
+
+
+
+
+
+�‘
+	transpose
+x	
+
+x_123
+perm
+
+transpose_57_perm_0/
+transpose_65
+
+
+
+ 
+€*"
+name
+
+"
+transpose_65‘
+	transpose
+x	
+
+x_131
+perm
+
+transpose_56_perm_0/
+transpose_66
+
+
+
+€
+ *"
+name
+
+"
+transpose_66†
+matmul
+x
+
+transpose_66
+y
+
+transpose_654
+transpose_x%
+#
+!attention_scores_41_transpose_x_04
+transpose_y%
+#
+!attention_scores_41_transpose_y_07
+attention_scores_41 
+
+
+
+€
+€*)
+name!
+
+"
+attention_scores_41†
+const)
+!_inversed_attention_scores_43_y_0
+*7
+name/
+'
+%"#
+!_inversed_attention_scores_43_y_0*
+val
+
+
+
+
+ó5>Ç
+mul
+x
+
+attention_scores_41*
+y%
+#
+!_inversed_attention_scores_43_y_0A
+_inversed_attention_scores_43 
+
+
+
+€
+€*3
+name+
+#
+!"
+_inversed_attention_scores_43˜
+add&
+x!
+
+_inversed_attention_scores_43
+y
+
+attention_mask_1-
+	input_231 
+
+
+
+€
+€*
+name
+
+
"
+	input_231€
+softmax
+x
+
+	input_231
+axis	
+
+var_8-
+	input_233 
+
+
+
+€
+€*
+name
+
+
"
+	input_233}
+const&
+context_layer_41_transpose_x_0
+*4
+name,
+$
+"" 
+context_layer_41_transpose_x_0*
+val
+
+
+�}
+const&
+context_layer_41_transpose_y_0
+*4
+name,
+$
+"" 
+context_layer_41_transpose_y_0*
+val
+
+
+�…
+	transpose
+x	
+
+x_127
+perm
+	
+var_887/
+transpose_67
+
+
+
+€
+ *"
+name
+
+"
+transpose_67ö
+matmul
+x
+
+	input_233
+y
+
+transpose_671
+transpose_x"
+ 
+context_layer_41_transpose_x_01
+transpose_y"
+ 
+context_layer_41_transpose_y_03
+context_layer_41
+
+
+
+€
+ *&
+name
+
+"
+context_layer_41a
+const
+var_903
+
+
+*
+name
+
+
+"
+op_903*!
+val
+
+
+
+
+
+�b
+const
+var_908
+
+
+*
+name
+
+
+"
+op_908*"
+val
+
+
+
+	
+€€
+	transpose
+x
+
+context_layer_41
+perm
+	
+var_903/
+transpose_64
+
+
+€
+
+ *"
+name
+
+"
+transpose_64€
+reshape
+x
+
+transpose_64
+shape
+	
+var_908'
+	input_235
+
+
+€
+€*
+name
+
+
"
+	input_235ê
+linear
+x
+
+	input_235B
+weight8
+6
+4model_encoder_layer_10_attention_output_dense_weight>
+bias6
+4
+2model_encoder_layer_10_attention_output_dense_bias'
+	linear_63
+
+
+€
+€*
+name
+
+
"
+	linear_63w
+add
+x
+
+	linear_63
+y
+
+	input_229'
+	input_239
+
+
+€
+€*
+name
+
+
"
+	input_239z
+const 
+input_241_axes_0
+
+
+*&
+name
+
+"
+input_241_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿª
+
+layer_norm
+x
+
+	input_239
+axes
+
+input_241_axes_0E
+gamma<
+:
+8model_encoder_layer_10_attention_output_LayerNorm_weightB
+beta:
+8
+6model_encoder_layer_10_attention_output_LayerNorm_bias
+epsilon
+
+
+var_10'
+	input_241
+
+
+€
+€*
+name
+
+
"
+	input_241â
+linear
+x
+
+	input_241>
+weight4
+2
+0model_encoder_layer_10_intermediate_dense_weight:
+bias2
+0
+.model_encoder_layer_10_intermediate_dense_bias'
+	linear_64
+
+
+€
+€*
+name
+
+
"
+	linear_64e
+const
+input_245_mode_0
+*&
+name
+
+"
+input_245_mode_0*
+val
+
+	"
+EXACT‚
+gelu
+x
+
+	linear_64
+mode
+
+input_245_mode_0'
+	input_245
+
+
+€
+€*
+name
+
+
"
+	input_245Ö
+linear
+x
+
+	input_2458
+weight.
+,
+*model_encoder_layer_10_output_dense_weight4
+bias,
+*
+(model_encoder_layer_10_output_dense_bias'
+	linear_65
+
+
+€
+€*
+name
+
+
"
+	linear_65w
+add
+x
+
+	linear_65
+y
+
+	input_241'
+	input_249
+
+
+€
+€*
+name
+
+
"
+	input_249z
+const 
+input_251_axes_0
+
+
+*&
+name
+
+"
+input_251_axes_0*'
+val 
+
+
+
+
+
+ÿÿÿÿÿÿÿÿÿ–
+
+layer_norm
+x
+
+	input_249
+axes
+
+input_251_axes_0;
+gamma2
+0
+.model_encoder_layer_10_output_LayerNorm_weight8
+beta0
+.
+,model_encoder_layer_10_output_LayerNorm_bias
+epsilon
+
+
+var_10'
+	input_251
+
+
+€
+€*
+name
+
+
"
+	input_251æ
+linear
+x
+
+	input_251@
+weight6
+4
+2model_encoder_layer_11_attention_self_query_weight<
+bias4
+2
+0model_encoder_layer_11_attention_self_query_bias'
+	linear_66
+
+
+€
+€*
+name
+
+
"
+	linear_66â
+linear
+x
+
+	input_251>
+weight4
+2
+0model_encoder_layer_11_attention_self_key_weight:
+bias2
+0
+.model_encoder_layer_11_attention_self_key_bias'
+	linear_67
+
+
+€
+€*
+name
+
+
"
+[Binary CoreML model specification omitted: the remainder of the MIL program in float32_model.mlpackage's model.mlmodel, covering the encoder layer 11 self-attention and output ops (linear, reshape, transpose, matmul, softmax, gelu, layer_norm), the final layer_norm that produces last_hidden_state, and the slice_by_index / pooler dense / tanh chain that produces pooler_output, followed by buildInfo metadata: coremltools 7.1, coremltools-component-torch 2.1.0, source dialect TorchScript.]
diff --git a/Sources/SwiftNLP/Resources/float32_model.mlpackage/Data/com.apple.CoreML/weights/weight.bin b/Sources/SwiftNLP/Resources/float32_model.mlpackage/Data/com.apple.CoreML/weights/weight.bin
new file mode 100644
index 0000000000000000000000000000000000000000..71d86e38eb7ae4707228b8af0888c01ab9b7c4a5
Binary files /dev/null and b/Sources/SwiftNLP/Resources/float32_model.mlpackage/Data/com.apple.CoreML/weights/weight.bin differ
diff --git a/Sources/SwiftNLP/Resources/float32_model.mlpackage/Manifest.json b/Sources/SwiftNLP/Resources/float32_model.mlpackage/Manifest.json
new file mode 100644
index 0000000000000000000000000000000000000000..2eebcce458da7726c52b94969188388ac7f645b7
--- /dev/null
+++ b/Sources/SwiftNLP/Resources/float32_model.mlpackage/Manifest.json
@@ -0,0 +1,18 @@
+{
+    "fileFormatVersion": "1.0.0",
+    "itemInfoEntries": {
+        "63B0B880-B145-44C1-9871-506E9D0C9935": {
+            "author": "com.apple.CoreML",
+            "description": "CoreML Model Specification",
+            "name": "model.mlmodel",
+            "path": "com.apple.CoreML/model.mlmodel"
+        },
+        "98B220AA-0669-4838-B6C4-38F80D1282C2": {
+            "author": "com.apple.CoreML",
+            "description": "CoreML Model Weights",
+            "name": "weights",
+            "path": "com.apple.CoreML/weights"
+        }
+    },
+    "rootModelIdentifier": "63B0B880-B145-44C1-9871-506E9D0C9935"
+}
diff --git a/Sources/SwiftNLPGenericLLMMacros/Macros.swift b/Sources/SwiftNLPGenericLLMMacros/Macros.swift
new file mode 100644
index 0000000000000000000000000000000000000000..b756250edf1afa6c71a8e6f3cdcf8a22505c9179
--- /dev/null
+++ b/Sources/SwiftNLPGenericLLMMacros/Macros.swift
@@ -0,0 +1,173 @@
+import CoreML
+import SwiftSyntax
+import SwiftSyntaxMacros
+
+
+@available(macOS 12, iOS 15.0, tvOS 17.0, watchOS 10.0, *)
+public struct LLMModelNameValidation: ExpressionMacro {
+    /**
+     Example expansion:
+     try! {
+         switch self.model {
+         case "gte-small":
+             self.inputDimention = 128;
+         case "all_MiniLM_L6_v2":
+             self.inputDimention = 512;
+         default:
+             throw fatalError("Model is not valid");
+         }
+     }();
+     */
+    
+    public static func expansion(
+        of node:  some FreestandingMacroExpansionSyntax,
+        in context: some MacroExpansionContext
+    ) throws -> ExprSyntax {
+        
+        // generate code
+        var macro = "try! { switch self.model { "
+        
+        for (k, v) in LLM_MODEL_CLASSES {
+            // extract values
+            let model_dim = v[LLMModelClassesKey.InputDimension]!
+            macro +=
+                """
+                case \"\(k)\":
+                    self.inputDimention = \(model_dim);
+                """
+        }
+        
+        macro += "default: throw fatalError(\"Model is not valid\"); } }();"
+        
+        return ExprSyntax(stringLiteral: macro)
+    }
+}
+
+
+@available(macOS 12, iOS 15.0, tvOS 17.0, watchOS 10.0, *)
+public struct LLMModelPredictionCases: ExpressionMacro {
+    /**
+     Example expansion:
+     {
+         switch self.model {
+         case "all_MiniLM_L6_v2":
+             let input_class = all_MiniLM_L6_v2Input(input_ids: inputIds, attention_mask: attentionMask);
+             output = try! LLMModel.prediction(input: input_class).featureValue(for: "embeddings")!.multiArrayValue!;
+         case "gte-small":
+             let input_class = float32_modelInput(input_ids: inputIds, attention_mask: attentionMask);
+             output = try! LLMModel.prediction(input: input_class).featureValue(for: "pooler_output")!.multiArrayValue!;
+         default:
+             output = nil;
+         }
+     }();
+     */
+    
+    public static func expansion(
+        of node:  some FreestandingMacroExpansionSyntax,
+        in context: some MacroExpansionContext
+    ) throws -> ExprSyntax {
+        
+        // get first argument from macro invocation
+        guard let input_arg = node.argumentList.first?.expression,
+            let segments = input_arg.as(StringLiteralExprSyntax.self)?.segments,
+            segments.count == 1,
+            case .stringSegment(let input_literal_segment)? = segments.first
+        else {
+            throw fatalError("Bad argument to macro.")
+        }
+        
+        // get second argument from macro invocation
+        guard let attention_arg = node.argumentList.dropFirst().first?.expression,
+            let attention_segments = attention_arg.as(StringLiteralExprSyntax.self)?.segments,
+            attention_segments.count == 1,
+            case .stringSegment(let attn_literal_segment)? = attention_segments.first
+        else {
+            throw fatalError("Bad argument to macro.")
+        }
+        
+        // get third argument from macro invocation
+        guard let output_arg = node.argumentList.dropFirst().dropFirst().first?.expression,
+            let output_segments = output_arg.as(StringLiteralExprSyntax.self)?.segments,
+            output_segments.count == 1,
+            case .stringSegment(let output_literal_segment)? = output_segments.first
+        else {
+            throw fatalError("Bad argument to macro.")
+        }
+        
+        // extract parameter values
+        let model_input = input_literal_segment.content.text
+        let model_attn = attn_literal_segment.content.text
+        let model_output = output_literal_segment.content.text
+        
+        // generate code
+        var macro = "{ switch self.model { "
+        
+        for (k, v) in LLM_MODEL_CLASSES {
+            let model_class = v[LLMModelClassesKey.Input]!
+            let model_feature = v[LLMModelClassesKey.FeatureName]!
+            macro +=
+                """
+                case \"\(k)\":
+                    let input_class = \(model_class)(input_ids: \(model_input), attention_mask: \(model_attn));
+                    \(model_output) = try! LLMModel.prediction(input: input_class).featureValue(for: \"\(model_feature)\")!.multiArrayValue!;
+                """
+        }
+        
+        macro += "default: output = nil; } }();"
+        
+        return ExprSyntax(stringLiteral: macro)
+            
+    }
+}
+
+
+@available(macOS 12, iOS 15.0, tvOS 17.0, watchOS 10.0, *)
+public enum LLMPredictionFunctions: DeclarationMacro {
+    public static func expansion(
+        of node: some FreestandingMacroExpansionSyntax,
+        in context: some MacroExpansionContext
+    ) throws -> [DeclSyntax] {
+        
+        var ret: String = "";
+        
+        // generate code
+        for (k, v) in LLM_MODEL_CLASSES {
+            
+            // extract values
+            let model_type_name = v[LLMModelClassesKey.Model]!;
+            let model_input_name = v[LLMModelClassesKey.Input]!;
+            let model_output_name = v[LLMModelClassesKey.Output]!;
+            let model_url = v[LLMModelClassesKey.URL]!;
+        
+            ret +=
+                """
+                public static func prediction(input: \(model_input_name)) throws -> \(model_output_name) {
+                    let model = try \(model_type_name)(contentsOf: URL(fileURLWithPath: \"Sources/SwiftNLP/Models/\(model_url)\"));
+                    return try model.prediction(input: input, options: MLPredictionOptions())
+                }
+
+                public static func prediction(input: \(model_input_name), options: MLPredictionOptions) throws -> \(model_output_name) {
+                    let model = try \(model_type_name)(contentsOf: URL(fileURLWithPath: \"Sources/SwiftNLP/Models/\(model_url)\"));
+                    let outFeatures: MLFeatureProvider = try model.prediction(input: input, options:options)
+                    return \(model_output_name)(features: outFeatures)
+                }
+
+                @available(macOS 13.6, iOS 17.0, tvOS 17.0, watchOS 10.0, *)
+                public static func prediction(input: \(model_input_name), options: MLPredictionOptions = MLPredictionOptions()) async throws -> \(model_output_name) {
+                    let model = try \(model_type_name)(contentsOf: URL(fileURLWithPath: \"Sources/SwiftNLP/Models/\(model_url)\"));
+                    let outFeatures: MLFeatureProvider? = try await model.prediction(input: input, options:options)
+                    return \(model_output_name)(features: outFeatures!)
+                }
+
+                public static func predictions(inputs: [\(model_input_name)], options: MLPredictionOptions = MLPredictionOptions()) throws -> [\(model_output_name)] {
+                    let model = try \(model_type_name)(contentsOf: URL(fileURLWithPath: \"Sources/SwiftNLP/Models/\(model_url)\"));
+                    let res = try model.predictions(inputs: inputs, options: options);
+                    return res;
+                }
+                
+                """;
+        }
+
+        return [DeclSyntax(stringLiteral: ret)];
+    }
+}
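
Reviewer note: these three macro implementations are only half of the wiring; the client target still needs freestanding macro declarations that point at this module via #externalMacro. Those declarations are not part of this excerpt, so the sketch below is illustrative only: the macro names and parameter labels are assumptions, while the module and type names come from this file.

    @freestanding(expression)
    macro validateModelName() -> () =
        #externalMacro(module: "SwiftNLPGenericLLMMacros", type: "LLMModelNameValidation")

    @freestanding(expression)
    macro modelPredictionCases(_ inputIds: String, _ attentionMask: String, _ output: String) -> () =
        #externalMacro(module: "SwiftNLPGenericLLMMacros", type: "LLMModelPredictionCases")

A call such as #modelPredictionCases("inputIds", "attentionMask", "output") would then expand to the switch shown in the LLMModelPredictionCases doc comment above.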
diff --git a/Sources/SwiftNLPGenericLLMMacros/Main.swift b/Sources/SwiftNLPGenericLLMMacros/Main.swift
new file mode 100644
index 0000000000000000000000000000000000000000..a1a9228af8c740e5644f388e9e93a9389c69b780
--- /dev/null
+++ b/Sources/SwiftNLPGenericLLMMacros/Main.swift
@@ -0,0 +1,12 @@
+import SwiftCompilerPlugin
+import SwiftSyntaxMacros
+
+@main
+struct SwiftNLPGenericLLMMacros: CompilerPlugin {
+    init() {}
+    var providingMacros: [SwiftSyntaxMacros.Macro.Type] = [
+        LLMPredictionFunctions.self,
+        LLMModelPredictionCases.self,
+        LLMModelNameValidation.self
+    ]
+}
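
For context (the Package.swift changes are not shown in this excerpt): a compiler plugin like this is declared with the swift-tools-version 5.9 `.macro` target type and the swift-syntax products imported here. A sketch of what such a target declaration typically looks like, offered only as an assumption about the manifest:

    // Package.swift excerpt (illustrative; not taken from this change set)
    .macro(
        name: "SwiftNLPGenericLLMMacros",
        dependencies: [
            .product(name: "SwiftSyntaxMacros", package: "swift-syntax"),
            .product(name: "SwiftCompilerPlugin", package: "swift-syntax"),
        ]
    ),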
diff --git a/Sources/SwiftNLPGenericLLMMacros/ModelClasses.swift b/Sources/SwiftNLPGenericLLMMacros/ModelClasses.swift
new file mode 100644
index 0000000000000000000000000000000000000000..2eb0950cb8d12335d0fd30b06375cff9e76c5723
--- /dev/null
+++ b/Sources/SwiftNLPGenericLLMMacros/ModelClasses.swift
@@ -0,0 +1,27 @@
+enum LLMModelClassesKey {
+    case Input
+    case Output
+    case Model
+    case FeatureName
+    case URL
+    case InputDimension
+}
+
+let LLM_MODEL_CLASSES: [String: [LLMModelClassesKey: Any]] = [
+    "all_MiniLM_L6_v2": [
+        LLMModelClassesKey.Input: all_MiniLM_L6_v2Input.self,
+        LLMModelClassesKey.Output: all_MiniLM_L6_v2Output.self,
+        LLMModelClassesKey.Model: all_MiniLM_L6_v2.self,
+        LLMModelClassesKey.FeatureName: "embeddings",
+        LLMModelClassesKey.URL: "all-MiniLM-L6-v2.mlmodelc",
+        LLMModelClassesKey.InputDimension: 512
+    ],
+    "gte-small": [
+        LLMModelClassesKey.Input: float32_modelInput.self,
+        LLMModelClassesKey.Output: float32_modelOutput.self,
+        LLMModelClassesKey.Model: float32_model.self,
+        LLMModelClassesKey.FeatureName: "pooler_output",
+        LLMModelClassesKey.URL: "float32_model.mlmodelc",
+        LLMModelClassesKey.InputDimension: 128
+    ]
+]
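
Because all three macros iterate LLM_MODEL_CLASSES at expansion time, supporting another CoreML model should, in principle, only require one more entry here plus the corresponding compiled .mlmodelc and generated wrapper. A hypothetical entry (the model name, classes, feature name, and dimension below are placeholders, not artifacts in this repository):

    "my_new_encoder": [
        LLMModelClassesKey.Input: my_new_encoderInput.self,    // generated input class
        LLMModelClassesKey.Output: my_new_encoderOutput.self,  // generated output class
        LLMModelClassesKey.Model: my_new_encoder.self,         // generated model class
        LLMModelClassesKey.FeatureName: "embeddings",          // output feature to read
        LLMModelClassesKey.URL: "my_new_encoder.mlmodelc",     // under Sources/SwiftNLP/Models/
        LLMModelClassesKey.InputDimension: 256                 // tokenized sequence length
    ]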
diff --git a/Sources/SwiftNLPGenericLLMMacros/all-MiniLM-L6-v2.swift b/Sources/SwiftNLPGenericLLMMacros/all-MiniLM-L6-v2.swift
new file mode 100644
index 0000000000000000000000000000000000000000..c2f0c441add7ef7602ae99c4a202576c29faa97b
--- /dev/null
+++ b/Sources/SwiftNLPGenericLLMMacros/all-MiniLM-L6-v2.swift
@@ -0,0 +1,306 @@
+//
+// all_MiniLM_L6_v2.swift
+//
+// This file was automatically generated and should not be edited.
+//
+
+import CoreML
+
+
+/// Model Prediction Input Type
+@available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *)
+class all_MiniLM_L6_v2Input : MLFeatureProvider {
+
+    /// input_ids as 1 by 512 matrix of floats
+    var input_ids: MLMultiArray
+
+    /// attention_mask as 1 by 512 matrix of floats
+    var attention_mask: MLMultiArray
+
+    var featureNames: Set<String> {
+        get {
+            return ["input_ids", "attention_mask"]
+        }
+    }
+    
+    func featureValue(for featureName: String) -> MLFeatureValue? {
+        if (featureName == "input_ids") {
+            return MLFeatureValue(multiArray: input_ids)
+        }
+        if (featureName == "attention_mask") {
+            return MLFeatureValue(multiArray: attention_mask)
+        }
+        return nil
+    }
+    
+    init(input_ids: MLMultiArray, attention_mask: MLMultiArray) {
+        self.input_ids = input_ids
+        self.attention_mask = attention_mask
+    }
+
+    convenience init(input_ids: MLShapedArray<Float>, attention_mask: MLShapedArray<Float>) {
+        self.init(input_ids: MLMultiArray(input_ids), attention_mask: MLMultiArray(attention_mask))
+    }
+
+}
+
+
+/// Model Prediction Output Type
+@available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *)
+class all_MiniLM_L6_v2Output : MLFeatureProvider {
+
+    /// Source provided by CoreML
+    private let provider : MLFeatureProvider
+
+    /// embeddings as multidimensional array of floats
+    var embeddings: MLMultiArray {
+        return self.provider.featureValue(for: "embeddings")!.multiArrayValue!
+    }
+
+    /// embeddings as multidimensional array of floats
+    var embeddingsShapedArray: MLShapedArray<Float> {
+        return MLShapedArray<Float>(self.embeddings)
+    }
+
+    var featureNames: Set<String> {
+        return self.provider.featureNames
+    }
+    
+    func featureValue(for featureName: String) -> MLFeatureValue? {
+        return self.provider.featureValue(for: featureName)
+    }
+
+    init(embeddings: MLMultiArray) {
+        self.provider = try! MLDictionaryFeatureProvider(dictionary: ["embeddings" : MLFeatureValue(multiArray: embeddings)])
+    }
+
+    init(features: MLFeatureProvider) {
+        self.provider = features
+    }
+}
+
+
+/// Class for model loading and prediction
+@available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *)
+class all_MiniLM_L6_v2 {
+    let model: MLModel
+
+    /// URL of model assuming it was installed in the same bundle as this class
+    class var urlOfModelInThisBundle : URL {
+        let bundle = Bundle(for: self)
+        return bundle.url(forResource: "all-MiniLM-L6-v2", withExtension:"mlmodelc")!
+    }
+
+    /**
+        Construct all_MiniLM_L6_v2 instance with an existing MLModel object.
+
+        Usually the application does not use this initializer unless it makes a subclass of all_MiniLM_L6_v2.
+        Such application may want to use `MLModel(contentsOfURL:configuration:)` and `all_MiniLM_L6_v2.urlOfModelInThisBundle` to create a MLModel object to pass-in.
+
+        - parameters:
+          - model: MLModel object
+    */
+    init(model: MLModel) {
+        self.model = model
+    }
+
+    /**
+        Construct a model with configuration
+
+        - parameters:
+           - configuration: the desired model configuration
+
+        - throws: an NSError object that describes the problem
+    */
+    convenience init(configuration: MLModelConfiguration = MLModelConfiguration()) throws {
+        try self.init(contentsOf: type(of:self).urlOfModelInThisBundle, configuration: configuration)
+    }
+
+    /**
+        Construct all_MiniLM_L6_v2 instance with explicit path to mlmodelc file
+        - parameters:
+           - modelURL: the file url of the model
+
+        - throws: an NSError object that describes the problem
+    */
+    convenience init(contentsOf modelURL: URL) throws {
+        try self.init(model: MLModel(contentsOf: modelURL))
+    }
+
+    /**
+        Construct a model with URL of the .mlmodelc directory and configuration
+
+        - parameters:
+           - modelURL: the file url of the model
+           - configuration: the desired model configuration
+
+        - throws: an NSError object that describes the problem
+    */
+    convenience init(contentsOf modelURL: URL, configuration: MLModelConfiguration) throws {
+        try self.init(model: MLModel(contentsOf: modelURL, configuration: configuration))
+    }
+
+    /**
+        Construct all_MiniLM_L6_v2 instance asynchronously with optional configuration.
+
+        Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
+
+        - parameters:
+          - configuration: the desired model configuration
+          - handler: the completion handler to be called when the model loading completes successfully or unsuccessfully
+    */
+    class func load(configuration: MLModelConfiguration = MLModelConfiguration(), completionHandler handler: @escaping (Swift.Result<all_MiniLM_L6_v2, Error>) -> Void) {
+        return self.load(contentsOf: self.urlOfModelInThisBundle, configuration: configuration, completionHandler: handler)
+    }
+
+    /**
+        Construct all_MiniLM_L6_v2 instance asynchronously with optional configuration.
+
+        Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
+
+        - parameters:
+          - configuration: the desired model configuration
+    */
+    class func load(configuration: MLModelConfiguration = MLModelConfiguration()) async throws -> all_MiniLM_L6_v2 {
+        return try await self.load(contentsOf: self.urlOfModelInThisBundle, configuration: configuration)
+    }
+
+    /**
+        Construct all_MiniLM_L6_v2 instance asynchronously with URL of the .mlmodelc directory with optional configuration.
+
+        Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
+
+        - parameters:
+          - modelURL: the URL to the model
+          - configuration: the desired model configuration
+          - handler: the completion handler to be called when the model loading completes successfully or unsuccessfully
+    */
+    class func load(contentsOf modelURL: URL, configuration: MLModelConfiguration = MLModelConfiguration(), completionHandler handler: @escaping (Swift.Result<all_MiniLM_L6_v2, Error>) -> Void) {
+        MLModel.load(contentsOf: modelURL, configuration: configuration) { result in
+            switch result {
+            case .failure(let error):
+                handler(.failure(error))
+            case .success(let model):
+                handler(.success(all_MiniLM_L6_v2(model: model)))
+            }
+        }
+    }
+
+    /**
+        Construct all_MiniLM_L6_v2 instance asynchronously with URL of the .mlmodelc directory with optional configuration.
+
+        Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
+
+        - parameters:
+          - modelURL: the URL to the model
+          - configuration: the desired model configuration
+    */
+    class func load(contentsOf modelURL: URL, configuration: MLModelConfiguration = MLModelConfiguration()) async throws -> all_MiniLM_L6_v2 {
+        let model = try await MLModel.load(contentsOf: modelURL, configuration: configuration)
+        return all_MiniLM_L6_v2(model: model)
+    }
+
+    /**
+        Make a prediction using the structured interface
+
+        - parameters:
+           - input: the input to the prediction as all_MiniLM_L6_v2Input
+
+        - throws: an NSError object that describes the problem
+
+        - returns: the result of the prediction as all_MiniLM_L6_v2Output
+    */
+    func prediction(input: all_MiniLM_L6_v2Input) throws -> all_MiniLM_L6_v2Output {
+        return try self.prediction(input: input, options: MLPredictionOptions())
+    }
+
+    /**
+        Make a prediction using the structured interface
+
+        - parameters:
+           - input: the input to the prediction as all_MiniLM_L6_v2Input
+           - options: prediction options 
+
+        - throws: an NSError object that describes the problem
+
+        - returns: the result of the prediction as all_MiniLM_L6_v2Output
+    */
+    func prediction(input: all_MiniLM_L6_v2Input, options: MLPredictionOptions) throws -> all_MiniLM_L6_v2Output {
+        let outFeatures = try model.prediction(from: input, options:options)
+        return all_MiniLM_L6_v2Output(features: outFeatures)
+    }
+
+    /**
+        Make an asynchronous prediction using the structured interface
+
+        - parameters:
+           - input: the input to the prediction as all_MiniLM_L6_v2Input
+           - options: prediction options 
+
+        - throws: an NSError object that describes the problem
+
+        - returns: the result of the prediction as all_MiniLM_L6_v2Output
+    */
+    @available(macOS 14.0, iOS 17.0, tvOS 17.0, watchOS 10.0, *)
+    func prediction(input: all_MiniLM_L6_v2Input, options: MLPredictionOptions = MLPredictionOptions()) async throws -> all_MiniLM_L6_v2Output {
+        let outFeatures = try await model.prediction(from: input, options:options)
+        return all_MiniLM_L6_v2Output(features: outFeatures)
+    }
+
+    /**
+        Make a prediction using the convenience interface
+
+        - parameters:
+            - input_ids as 1 by 512 matrix of floats
+            - attention_mask as 1 by 512 matrix of floats
+
+        - throws: an NSError object that describes the problem
+
+        - returns: the result of the prediction as all_MiniLM_L6_v2Output
+    */
+    func prediction(input_ids: MLMultiArray, attention_mask: MLMultiArray) throws -> all_MiniLM_L6_v2Output {
+        let input_ = all_MiniLM_L6_v2Input(input_ids: input_ids, attention_mask: attention_mask)
+        return try self.prediction(input: input_)
+    }
+
+    /**
+        Make a prediction using the convenience interface
+
+        - parameters:
+            - input_ids as 1 by 512 matrix of floats
+            - attention_mask as 1 by 512 matrix of floats
+
+        - throws: an NSError object that describes the problem
+
+        - returns: the result of the prediction as all_MiniLM_L6_v2Output
+    */
+
+    func prediction(input_ids: MLShapedArray<Float>, attention_mask: MLShapedArray<Float>) throws -> all_MiniLM_L6_v2Output {
+        let input_ = all_MiniLM_L6_v2Input(input_ids: input_ids, attention_mask: attention_mask)
+        return try self.prediction(input: input_)
+    }
+
+    /**
+        Make a batch prediction using the structured interface
+
+        - parameters:
+           - inputs: the inputs to the prediction as [all_MiniLM_L6_v2Input]
+           - options: prediction options 
+
+        - throws: an NSError object that describes the problem
+
+        - returns: the result of the prediction as [all_MiniLM_L6_v2Output]
+    */
+    func predictions(inputs: [all_MiniLM_L6_v2Input], options: MLPredictionOptions = MLPredictionOptions()) throws -> [all_MiniLM_L6_v2Output] {
+        let batchIn = MLArrayBatchProvider(array: inputs)
+        let batchOut = try model.predictions(from: batchIn, options: options)
+        var results : [all_MiniLM_L6_v2Output] = []
+        results.reserveCapacity(inputs.count)
+        for i in 0..<batchOut.count {
+            let outProvider = batchOut.features(at: i)
+            let result =  all_MiniLM_L6_v2Output(features: outProvider)
+            results.append(result)
+        }
+        return results
+    }
+}
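
A minimal usage sketch of the generated wrapper above, not part of this diff: the zero/one fills stand in for real tokenizer output, and the model path mirrors the Sources/SwiftNLP/Models/ location used by the macros, which is an assumption about the working directory.

    import CoreML
    import Foundation

    @available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *)
    func miniLMExample() throws -> MLMultiArray {
        // 1 x 512 inputs, as documented on all_MiniLM_L6_v2Input.
        let inputIds = try MLMultiArray(shape: [1, 512], dataType: .float32)
        let attentionMask = try MLMultiArray(shape: [1, 512], dataType: .float32)
        for i in 0..<512 {
            inputIds[i] = 0        // placeholder token id
            attentionMask[i] = 1   // 1 = attend to this position (placeholder)
        }
        let model = try all_MiniLM_L6_v2(
            contentsOf: URL(fileURLWithPath: "Sources/SwiftNLP/Models/all-MiniLM-L6-v2.mlmodelc"))
        let output = try model.prediction(input_ids: inputIds, attention_mask: attentionMask)
        return output.embeddings
    }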
diff --git a/Sources/SwiftNLPGenericLLMMacros/float32_model.swift b/Sources/SwiftNLPGenericLLMMacros/float32_model.swift
new file mode 100644
index 0000000000000000000000000000000000000000..895c2fdb7ba5c7e9a7e2901daa59205de3bbded4
--- /dev/null
+++ b/Sources/SwiftNLPGenericLLMMacros/float32_model.swift
@@ -0,0 +1,316 @@
+//
+// float32_model.swift
+//
+// This file was automatically generated and should not be edited.
+//
+
+import CoreML
+
+
+/// Model Prediction Input Type
+@available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *)
+class float32_modelInput : MLFeatureProvider {
+
+    /// Indices of input sequence tokens in the vocabulary as 1 by 128 matrix of 32-bit integers
+    var input_ids: MLMultiArray
+
+    /// Mask to avoid performing attention on padding token indices (1 = not masked, 0 = masked) as 1 by 128 matrix of 32-bit integers
+    var attention_mask: MLMultiArray
+
+    var featureNames: Set<String> {
+        get {
+            return ["input_ids", "attention_mask"]
+        }
+    }
+    
+    func featureValue(for featureName: String) -> MLFeatureValue? {
+        if (featureName == "input_ids") {
+            return MLFeatureValue(multiArray: input_ids)
+        }
+        if (featureName == "attention_mask") {
+            return MLFeatureValue(multiArray: attention_mask)
+        }
+        return nil
+    }
+    
+    init(input_ids: MLMultiArray, attention_mask: MLMultiArray) {
+        self.input_ids = input_ids
+        self.attention_mask = attention_mask
+    }
+
+    convenience init(input_ids: MLShapedArray<Int32>, attention_mask: MLShapedArray<Int32>) {
+        self.init(input_ids: MLMultiArray(input_ids), attention_mask: MLMultiArray(attention_mask))
+    }
+
+}
+
+
+/// Model Prediction Output Type
+@available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *)
+class float32_modelOutput : MLFeatureProvider {
+
+    /// Source provided by CoreML
+    private let provider : MLFeatureProvider
+
+    /// Sequence of hidden-states at the output of the last layer of the model as 1 × 128 × 384 3-dimensional array of floats
+    var last_hidden_state: MLMultiArray {
+        return self.provider.featureValue(for: "last_hidden_state")!.multiArrayValue!
+    }
+
+    /// Sequence of hidden-states at the output of the last layer of the model as 1 × 128 × 384 3-dimensional array of floats
+    var last_hidden_stateShapedArray: MLShapedArray<Float> {
+        return MLShapedArray<Float>(self.last_hidden_state)
+    }
+
+    /// Last layer hidden-state of the first token of the sequence as 1 by 384 matrix of floats
+    var pooler_output: MLMultiArray {
+        return self.provider.featureValue(for: "pooler_output")!.multiArrayValue!
+    }
+
+    /// Last layer hidden-state of the first token of the sequence as 1 by 384 matrix of floats
+    var pooler_outputShapedArray: MLShapedArray<Float> {
+        return MLShapedArray<Float>(self.pooler_output)
+    }
+
+    var featureNames: Set<String> {
+        return self.provider.featureNames
+    }
+    
+    func featureValue(for featureName: String) -> MLFeatureValue? {
+        return self.provider.featureValue(for: featureName)
+    }
+
+    init(last_hidden_state: MLMultiArray, pooler_output: MLMultiArray) {
+        self.provider = try! MLDictionaryFeatureProvider(dictionary: ["last_hidden_state" : MLFeatureValue(multiArray: last_hidden_state), "pooler_output" : MLFeatureValue(multiArray: pooler_output)])
+    }
+
+    init(features: MLFeatureProvider) {
+        self.provider = features
+    }
+}
+
+
+/// Class for model loading and prediction
+@available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *)
+class float32_model {
+    let model: MLModel
+
+    /// URL of model assuming it was installed in the same bundle as this class
+    class var urlOfModelInThisBundle : URL {
+        let bundle = Bundle(for: self)
+        return bundle.url(forResource: "float32_model", withExtension:"mlmodelc")!
+    }
+
+    /**
+        Construct float32_model instance with an existing MLModel object.
+
+        Usually the application does not use this initializer unless it makes a subclass of float32_model.
+        Such application may want to use `MLModel(contentsOfURL:configuration:)` and `float32_model.urlOfModelInThisBundle` to create a MLModel object to pass-in.
+
+        - parameters:
+          - model: MLModel object
+    */
+    init(model: MLModel) {
+        self.model = model
+    }
+
+    /**
+        Construct a model with configuration
+
+        - parameters:
+           - configuration: the desired model configuration
+
+        - throws: an NSError object that describes the problem
+    */
+    convenience init(configuration: MLModelConfiguration = MLModelConfiguration()) throws {
+        try self.init(contentsOf: type(of:self).urlOfModelInThisBundle, configuration: configuration)
+    }
+
+    /**
+        Construct float32_model instance with explicit path to mlmodelc file
+        - parameters:
+           - modelURL: the file url of the model
+
+        - throws: an NSError object that describes the problem
+    */
+    convenience init(contentsOf modelURL: URL) throws {
+        try self.init(model: MLModel(contentsOf: modelURL))
+    }
+
+    /**
+        Construct a model with URL of the .mlmodelc directory and configuration
+
+        - parameters:
+           - modelURL: the file url of the model
+           - configuration: the desired model configuration
+
+        - throws: an NSError object that describes the problem
+    */
+    convenience init(contentsOf modelURL: URL, configuration: MLModelConfiguration) throws {
+        try self.init(model: MLModel(contentsOf: modelURL, configuration: configuration))
+    }
+
+    /**
+        Construct float32_model instance asynchronously with optional configuration.
+
+        Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
+
+        - parameters:
+          - configuration: the desired model configuration
+          - handler: the completion handler to be called when the model loading completes successfully or unsuccessfully
+    */
+    class func load(configuration: MLModelConfiguration = MLModelConfiguration(), completionHandler handler: @escaping (Swift.Result<float32_model, Error>) -> Void) {
+        return self.load(contentsOf: self.urlOfModelInThisBundle, configuration: configuration, completionHandler: handler)
+    }
+
+    /**
+        Construct float32_model instance asynchronously with optional configuration.
+
+        Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
+
+        - parameters:
+          - configuration: the desired model configuration
+    */
+    class func load(configuration: MLModelConfiguration = MLModelConfiguration()) async throws -> float32_model {
+        return try await self.load(contentsOf: self.urlOfModelInThisBundle, configuration: configuration)
+    }
+
+    /**
+        Construct float32_model instance asynchronously with URL of the .mlmodelc directory with optional configuration.
+
+        Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
+
+        - parameters:
+          - modelURL: the URL to the model
+          - configuration: the desired model configuration
+          - handler: the completion handler to be called when the model loading completes successfully or unsuccessfully
+    */
+    class func load(contentsOf modelURL: URL, configuration: MLModelConfiguration = MLModelConfiguration(), completionHandler handler: @escaping (Swift.Result<float32_model, Error>) -> Void) {
+        MLModel.load(contentsOf: modelURL, configuration: configuration) { result in
+            switch result {
+            case .failure(let error):
+                handler(.failure(error))
+            case .success(let model):
+                handler(.success(float32_model(model: model)))
+            }
+        }
+    }
+
+    /**
+        Construct float32_model instance asynchronously with URL of the .mlmodelc directory with optional configuration.
+
+        Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
+
+        - parameters:
+          - modelURL: the URL to the model
+          - configuration: the desired model configuration
+    */
+    class func load(contentsOf modelURL: URL, configuration: MLModelConfiguration = MLModelConfiguration()) async throws -> float32_model {
+        let model = try await MLModel.load(contentsOf: modelURL, configuration: configuration)
+        return float32_model(model: model)
+    }
+
+    /**
+        Make a prediction using the structured interface
+
+        - parameters:
+           - input: the input to the prediction as float32_modelInput
+
+        - throws: an NSError object that describes the problem
+
+        - returns: the result of the prediction as float32_modelOutput
+    */
+    func prediction(input: float32_modelInput) throws -> float32_modelOutput {
+        return try self.prediction(input: input, options: MLPredictionOptions())
+    }
+
+    /**
+        Make a prediction using the structured interface
+
+        - parameters:
+           - input: the input to the prediction as float32_modelInput
+           - options: prediction options 
+
+        - throws: an NSError object that describes the problem
+
+        - returns: the result of the prediction as float32_modelOutput
+    */
+    func prediction(input: float32_modelInput, options: MLPredictionOptions) throws -> float32_modelOutput {
+        let outFeatures = try model.prediction(from: input, options:options)
+        return float32_modelOutput(features: outFeatures)
+    }
+
+    /**
+        Make an asynchronous prediction using the structured interface
+
+        - parameters:
+           - input: the input to the prediction as float32_modelInput
+           - options: prediction options 
+
+        - throws: an NSError object that describes the problem
+
+        - returns: the result of the prediction as float32_modelOutput
+    */
+    @available(macOS 14.0, iOS 17.0, tvOS 17.0, watchOS 10.0, *)
+    func prediction(input: float32_modelInput, options: MLPredictionOptions = MLPredictionOptions()) async throws -> float32_modelOutput {
+        let outFeatures = try await model.prediction(from: input, options:options)
+        return float32_modelOutput(features: outFeatures)
+    }
+
+    /**
+        Make a prediction using the convenience interface
+
+        - parameters:
+            - input_ids: Indices of input sequence tokens in the vocabulary as 1 by 128 matrix of 32-bit integers
+            - attention_mask: Mask to avoid performing attention on padding token indices (1 = not masked, 0 = masked) as 1 by 128 matrix of 32-bit integers
+
+        - throws: an NSError object that describes the problem
+
+        - returns: the result of the prediction as float32_modelOutput
+    */
+    func prediction(input_ids: MLMultiArray, attention_mask: MLMultiArray) throws -> float32_modelOutput {
+        let input_ = float32_modelInput(input_ids: input_ids, attention_mask: attention_mask)
+        return try self.prediction(input: input_)
+    }
+
+    /**
+        Make a prediction using the convenience interface
+
+        - parameters:
+            - input_ids: Indices of input sequence tokens in the vocabulary as 1 by 128 matrix of 32-bit integers
+            - attention_mask: Mask to avoid performing attention on padding token indices (1 = not masked, 0 = masked) as 1 by 128 matrix of 32-bit integers
+
+        - throws: an NSError object that describes the problem
+
+        - returns: the result of the prediction as float32_modelOutput
+    */
+
+    func prediction(input_ids: MLShapedArray<Int32>, attention_mask: MLShapedArray<Int32>) throws -> float32_modelOutput {
+        let input_ = float32_modelInput(input_ids: input_ids, attention_mask: attention_mask)
+        return try self.prediction(input: input_)
+    }
+
+    /**
+        Make a batch prediction using the structured interface
+
+        - parameters:
+           - inputs: the inputs to the prediction as [float32_modelInput]
+           - options: prediction options 
+
+        - throws: an NSError object that describes the problem
+
+        - returns: the result of the prediction as [float32_modelOutput]
+    */
+    func predictions(inputs: [float32_modelInput], options: MLPredictionOptions = MLPredictionOptions()) throws -> [float32_modelOutput] {
+        let batchIn = MLArrayBatchProvider(array: inputs)
+        let batchOut = try model.predictions(from: batchIn, options: options)
+        var results : [float32_modelOutput] = []
+        results.reserveCapacity(inputs.count)
+        for i in 0..<batchOut.count {
+            let outProvider = batchOut.features(at: i)
+            let result =  float32_modelOutput(features: outProvider)
+            results.append(result)
+        }
+        return results
+    }
+}
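
And a matching sketch for the gte-small wrapper above, which takes 1 x 128 Int32 inputs via MLShapedArray and exposes the pooled sentence embedding. Again the fills are placeholders for tokenizer output and the model path is assumed, not taken from this diff.

    import CoreML
    import Foundation

    @available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *)
    func gteSmallExample() throws -> MLShapedArray<Float> {
        let ids = MLShapedArray<Int32>(repeating: 0, shape: [1, 128])   // placeholder token ids
        let mask = MLShapedArray<Int32>(repeating: 1, shape: [1, 128])  // 1 = not masked
        let model = try float32_model(
            contentsOf: URL(fileURLWithPath: "Sources/SwiftNLP/Models/float32_model.mlmodelc"))
        let output = try model.prediction(input_ids: ids, attention_mask: mask)
        return output.pooler_outputShapedArray  // 1 x 384 sentence embedding
    }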
diff --git a/Tests/SwiftNLPTests/1. Data Collection/HNSW/DurableHNSWCorpusTests.swift b/Tests/SwiftNLPTests/1. Data Collection/HNSW/DurableHNSWCorpusTests.swift
index 358e0f9e8bd68fb1ff8ace55215d2a905bf8c503..545a525e87afec840e7020cb7b5bf92916450981 100644
--- a/Tests/SwiftNLPTests/1. Data Collection/HNSW/DurableHNSWCorpusTests.swift	
+++ b/Tests/SwiftNLPTests/1. Data Collection/HNSW/DurableHNSWCorpusTests.swift	
@@ -1,230 +1,230 @@
-#if os(macOS)
-import XCTest
-import Foundation
-import CoreLMDB
-import System
-@testable import SwiftNLP
-
-// MARK: These tests are not to be included within the pipeline
-
-final class DurableHNSWCorpusTests: XCTestCase {
-    /// This is used to skip these tests in the GitLab pipeline
-    override class var defaultTestSuite: XCTestSuite {
-        if ProcessInfo.processInfo.environment["SKIP_TESTS"] == "DurableHNSWCorpusTests" {
-            return XCTestSuite(name: "Empty")
-        }
-        return super.defaultTestSuite
-    }
-    
-    /// Setting up constants for environment
-    private let ONE_GB: Int = 1_073_741_824
-    private let ONE_MB: Int = 1_048_576
-    private let ONE_KB: Int = 1_024
-    private let ONE_B:  Int = 1
-    private let DEFAULT_MAXREADERS: UInt32 = 126
-    private let DEFAULT_MAXDBS:     UInt32 = 10
-    
-    /// Setting up working directory
-    private var workingDirectoryPath: FilePath!
-    
-    override func setUpWithError() throws {
-        try super.setUpWithError()
-        
-        let fileManager = FileManager.default
-        let directoryURL = fileManager.homeDirectoryForCurrentUser.appendingPathComponent("/Downloads/lmdb")
-        try fileManager.createDirectory(at: directoryURL, withIntermediateDirectories: true, attributes: nil)
-        workingDirectoryPath = FilePath(directoryURL.path)
-        
-        /// This commented out code alternatively works in the XCode bundle resource environment
-//        guard let resourcesPath = Bundle.module.resourcePath else { fatalError("Failed to find resource path.") }
-//        let resourcesDirectoryURL = URL(fileURLWithPath: resourcesPath).appendingPathComponent("lmdb")
+//#if os(macOS)
+//import XCTest
+//import Foundation
+//import CoreLMDB
+//import System
+//@testable import SwiftNLP
+//
+//// MARK: These tests are not to be included within the pipeline
+//
+//final class DurableHNSWCorpusTests: XCTestCase {
+//    /// This is used to skip these tests in the GitLab pipeline
+//    override class var defaultTestSuite: XCTestSuite {
+//        if ProcessInfo.processInfo.environment["SKIP_TESTS"] == "DurableHNSWCorpusTests" {
+//            return XCTestSuite(name: "Empty")
+//        }
+//        return super.defaultTestSuite
+//    }
+//    
+//    /// Setting up constants for environment
+//    private let ONE_GB: Int = 1_073_741_824
+//    private let ONE_MB: Int = 1_048_576
+//    private let ONE_KB: Int = 1_024
+//    private let ONE_B:  Int = 1
+//    private let DEFAULT_MAXREADERS: UInt32 = 126
+//    private let DEFAULT_MAXDBS:     UInt32 = 10
+//    
+//    /// Setting up working directory
+//    private var workingDirectoryPath: FilePath!
+//    
+//    override func setUpWithError() throws {
+//        try super.setUpWithError()
+//        
 //        let fileManager = FileManager.default
-//        try fileManager.createDirectory(at: resourcesDirectoryURL, withIntermediateDirectories: true, attributes: nil)
-//        print("Resources directory: \(resourcesDirectoryURL)")
-//        workingDirectoryPath = FilePath(resourcesDirectoryURL.path)
-    }
-    
-    func testBuildBasicCorpus() throws {
-        let docs = [
-            "CNTK formerly known as Computational Network Toolkit",
-            "is a free easy-to-use open-source commercial-grade toolkit",
-            "that enable us to train deep learning algorithms to learn like the human brain."
-        ]
-        
-        /// Setting up the environment
-        let env = try Environment()
-        try env.setMapSize(ONE_GB)
-        try env.setMaxReaders(DEFAULT_MAXREADERS)
-        try env.setMaxDBs(DEFAULT_MAXDBS)
-        try env.open(path: workingDirectoryPath)
-        
-        /// Writing to LMDB
-        let transaction = try Transaction.begin(.write, in: env)
-
-        let corpus = try DurableHNSWCorpus<String,ContextFreeEncoder<Double>>(
-            namespace: "testBasicExample",
-            in: transaction
-        )
-        
-        for doc in docs {
-            try corpus.addUntokenizedDocument(doc, in: transaction)
-        }
-        
-        try transaction.commit()
-        
-        /// Reading from LMDB
-        let readTransaction = try Transaction.begin(.read, in: env)
-        
-        let _ = try DurableHNSWCorpus<String,ContextFreeEncoder<Double>>(
-            namespace: "testBasicExample",
-            in: readTransaction
-        )
-        
-        readTransaction.abort()
-        
-        // XCTAssert(readCorpus.count == 3)
-        /// readCorpus.count == 3 will fail because we have not fixed the bug with setting size upon reads
-        /// This is because size is only incremented when insertion is called but it is not called when read from disk!
-    }
-    
-    func testQueryBasicCorpus() async throws {
-        let docs = [
-            "The quick brown fox jumps over the lazy dog",
-            "I enjoy taking long walks along the beach at sunset",
-            "Advances in neural networks have enabled new AI capabilities",
-            "The stock market experienced a significant downturn last week",
-            "Cooking a good meal can be both an art and a science",
-            "The exploration of space is both challenging and rewarding",
-            "Machine learning models are becoming increasingly sophisticated",
-            "I love reading about history and ancient civilizations"
-        ]
-        
-        let query = "I like to read about new technology and artificial intelligence"
-        //let documentEncoder = ContextFreeEncoder<Double>(source: .glove6B50d)
-        
-        /// Setting up the environment
-        let env = try Environment()
-        try env.setMapSize(ONE_GB)
-        try env.setMaxReaders(DEFAULT_MAXREADERS)
-        try env.setMaxDBs(DEFAULT_MAXDBS)
-        try env.open(path: workingDirectoryPath)
-        
-        let transaction = try Transaction.begin(.write, in: env)
-        
-        /// Saving the memory map to disk
-        let corpus = try DurableHNSWCorpus<String,ContextFreeEncoder<Double>>(
-            namespace: "testBasicQueryExample",
-            in: transaction
-        )
-        
-        for doc in docs {
-            try corpus.addUntokenizedDocument(doc, in: transaction)
-        }
-        
-        corpus.saveDictionaryToDownloads(fileName: "dictionary.mmap")
-        
-        try transaction.commit()
-        
-        do {
-            let queryVector: [Double] = corpus.documentEncoder.encodeToken(query).map { Double($0) }
-            
-            /// Reading the memory map (and dictionary) from disk
-            let readTransaction = try Transaction.begin(.write, in: env)
-            
-            let readCorpus = try DurableHNSWCorpus<String,ContextFreeEncoder<Double>>(
-                namespace: "testBasicQueryExample",
-                in: readTransaction
-            )
-            
-            readCorpus.dictionary = DurableHNSWCorpus.readDictionaryFromDownloads(fileName: "dictionary.mmap") // TODO: move this to initializer?
-            
-            let result = try readCorpus.index.find(near: queryVector, limit: 8, in: transaction)
-            
-            for result in result {
-                let key = Int(result.id.foreignKey)!
-                print(readCorpus.getUntokenizedDocument(at: key))
-            }
-        } catch {
-            print("Error when trying corpus.encodedDocuments.find(): \(error)")
-        }
-        
-        try transaction.commit()
-    }
-    
-    func testBuildGuelphSubredditCorpus() async throws {
-        /// Generates the LMDB durable storage to disk but runs no tests otherwise
-    
-        /// Setting up the environment
-        let env = try Environment()
-        try env.setMapSize(ONE_GB)
-        try env.setMaxReaders(DEFAULT_MAXREADERS)
-        try env.setMaxDBs(DEFAULT_MAXDBS)
-        try env.open(path: workingDirectoryPath)
-        
-        /// Get subreddit data
-        guard let submissionsURL = Bundle.module.url(forResource: "Guelph_submissions", withExtension: "zst") else {
-            fatalError("Failed to find waterloo_submissions.zst in test bundle.")
-        }
-        guard let submissionsData = try? Data(contentsOf: submissionsURL) else {
-            fatalError("Failed to load waterloo_submissions.zst from test bundle.")
-        }
-
-        let (submissions, _ ): ([Submission],[Data]) = try await loadFromRedditArchive(submissionsData)
-        
-        let transaction = try Transaction.begin(.write, in: env)
-        
-        let documentEncoder = ContextFreeEncoder<Double>(source: .glove6B50d)
-        
-        let corpus = try DurableHNSWCorpus<String,ContextFreeEncoder>(
-            encoder: documentEncoder,
-            namespace: "subreddit_durable",
-            in: transaction
-        )
-
-        /// Add documents to corpus
-        for submission in submissions {
-            if let text = submission.selftext {
-                try corpus.addUntokenizedDocument(text, in: transaction)
-            }
-        }
-
-        /// Save dictionary to disk
-        corpus.saveDictionaryToDownloads(fileName: "dictionary.mmap")
-        
-        try transaction.commit()
-    }
-    
-    func testQueryGuelphSubredditCorpus() async throws {
-        let documentEncoder = ContextFreeEncoder<Double>(source: .glove6B50d)
-        
-        /// Setting up the environment
-        let env = try Environment()
-        try env.setMapSize(ONE_GB)
-        try env.setMaxReaders(DEFAULT_MAXREADERS)
-        try env.setMaxDBs(DEFAULT_MAXDBS)
-        try env.open(path: workingDirectoryPath)
-        
-        /// Reading the memory map (and dictionary) from disk
-        let transaction = try Transaction.begin(.read, in: env)
-        
-        let corpus = try DurableHNSWCorpus<String,ContextFreeEncoder>(
-            encoder: documentEncoder,
-            namespace: "subreddit_durable",
-            in: transaction
-        )
-        
-        corpus.dictionary = DurableHNSWCorpus.readDictionaryFromDownloads(fileName: "dictionary.mmap")
-        
-        let query = "I love waterloo and I love the geese."
-        let queryVector: [Double] = documentEncoder.encodeToken(query).map { Double($0) }
-        
-        let result = try corpus.index.find(near: queryVector, limit: 8, in: transaction)
-        
-        for result in result {
-            let key = Int(result.id.foreignKey)!
-            print(corpus.getUntokenizedDocument(at: key))
-        }
-    }
-}
-#endif
-
+//        let directoryURL = fileManager.homeDirectoryForCurrentUser.appendingPathComponent("/Downloads/lmdb")
+//        try fileManager.createDirectory(at: directoryURL, withIntermediateDirectories: true, attributes: nil)
+//        workingDirectoryPath = FilePath(directoryURL.path)
+//        
+//        /// This commented out code alternatively works in the XCode bundle resource environment
+////        guard let resourcesPath = Bundle.module.resourcePath else { fatalError("Failed to find resource path.") }
+////        let resourcesDirectoryURL = URL(fileURLWithPath: resourcesPath).appendingPathComponent("lmdb")
+////        let fileManager = FileManager.default
+////        try fileManager.createDirectory(at: resourcesDirectoryURL, withIntermediateDirectories: true, attributes: nil)
+////        print("Resources directory: \(resourcesDirectoryURL)")
+////        workingDirectoryPath = FilePath(resourcesDirectoryURL.path)
+//    }
+//    
+//    func testBuildBasicCorpus() throws {
+//        let docs = [
+//            "CNTK formerly known as Computational Network Toolkit",
+//            "is a free easy-to-use open-source commercial-grade toolkit",
+//            "that enable us to train deep learning algorithms to learn like the human brain."
+//        ]
+//        
+//        /// Setting up the environment
+//        let env = try Environment()
+//        try env.setMapSize(ONE_GB)
+//        try env.setMaxReaders(DEFAULT_MAXREADERS)
+//        try env.setMaxDBs(DEFAULT_MAXDBS)
+//        try env.open(path: workingDirectoryPath)
+//        
+//        /// Writing to LMDB
+//        let transaction = try Transaction.begin(.write, in: env)
+//
+//        let corpus = try DurableHNSWCorpus<String,ContextFreeEncoder<Double>>(
+//            namespace: "testBasicExample",
+//            in: transaction
+//        )
+//        
+//        for doc in docs {
+//            try corpus.addUntokenizedDocument(doc, in: transaction)
+//        }
+//        
+//        try transaction.commit()
+//        
+//        /// Reading from LMDB
+//        let readTransaction = try Transaction.begin(.read, in: env)
+//        
+//        let _ = try DurableHNSWCorpus<String,ContextFreeEncoder<Double>>(
+//            namespace: "testBasicExample",
+//            in: readTransaction
+//        )
+//        
+//        readTransaction.abort()
+//        
+//        // XCTAssert(readCorpus.count == 3)
+//        /// readCorpus.count == 3 will fail because we have not fixed the bug with setting size upon reads
+//        /// This is because size is only incremented when insertion is called but it is not called when read from disk!
+//    }
+//    
+//    func testQueryBasicCorpus() async throws {
+//        let docs = [
+//            "The quick brown fox jumps over the lazy dog",
+//            "I enjoy taking long walks along the beach at sunset",
+//            "Advances in neural networks have enabled new AI capabilities",
+//            "The stock market experienced a significant downturn last week",
+//            "Cooking a good meal can be both an art and a science",
+//            "The exploration of space is both challenging and rewarding",
+//            "Machine learning models are becoming increasingly sophisticated",
+//            "I love reading about history and ancient civilizations"
+//        ]
+//        
+//        let query = "I like to read about new technology and artificial intelligence"
+//        //let documentEncoder = ContextFreeEncoder<Double>(source: .glove6B50d)
+//        
+//        /// Setting up the environment
+//        let env = try Environment()
+//        try env.setMapSize(ONE_GB)
+//        try env.setMaxReaders(DEFAULT_MAXREADERS)
+//        try env.setMaxDBs(DEFAULT_MAXDBS)
+//        try env.open(path: workingDirectoryPath)
+//        
+//        let transaction = try Transaction.begin(.write, in: env)
+//        
+//        /// Saving the memory map to disk
+//        let corpus = try DurableHNSWCorpus<String,ContextFreeEncoder<Double>>(
+//            namespace: "testBasicQueryExample",
+//            in: transaction
+//        )
+//        
+//        for doc in docs {
+//            try corpus.addUntokenizedDocument(doc, in: transaction)
+//        }
+//        
+//        corpus.saveDictionaryToDownloads(fileName: "dictionary.mmap")
+//        
+//        try transaction.commit()
+//        
+//        do {
+//            let queryVector: [Double] = corpus.documentEncoder.encodeToken(query).map { Double($0) }
+//            
+//            /// Reading the memory map (and dictionary) from disk
+//            let readTransaction = try Transaction.begin(.write, in: env)
+//            
+//            let readCorpus = try DurableHNSWCorpus<String,ContextFreeEncoder<Double>>(
+//                namespace: "testBasicQueryExample",
+//                in: readTransaction
+//            )
+//            
+//            readCorpus.dictionary = DurableHNSWCorpus.readDictionaryFromDownloads(fileName: "dictionary.mmap") // TODO: move this to initializer?
+//            
+//            let result = try readCorpus.index.find(near: queryVector, limit: 8, in: transaction)
+//            
+//            for result in result {
+//                let key = Int(result.id.foreignKey)!
+//                print(readCorpus.getUntokenizedDocument(at: key))
+//            }
+//        } catch {
+//            print("Error when trying corpus.encodedDocuments.find(): \(error)")
+//        }
+//        
+//        try transaction.commit()
+//    }
+//    
+//    func testBuildGuelphSubredditCorpus() async throws {
+//        /// Generates the LMDB durable storage to disk but runs no tests otherwise
+//    
+//        /// Setting up the environment
+//        let env = try Environment()
+//        try env.setMapSize(ONE_GB)
+//        try env.setMaxReaders(DEFAULT_MAXREADERS)
+//        try env.setMaxDBs(DEFAULT_MAXDBS)
+//        try env.open(path: workingDirectoryPath)
+//        
+//        /// Get subreddit data
+//        guard let submissionsURL = Bundle.module.url(forResource: "Guelph_submissions", withExtension: "zst") else {
+//            fatalError("Failed to find waterloo_submissions.zst in test bundle.")
+//        }
+//        guard let submissionsData = try? Data(contentsOf: submissionsURL) else {
+//            fatalError("Failed to load waterloo_submissions.zst from test bundle.")
+//        }
+//
+//        let (submissions, _ ): ([Submission],[Data]) = try await loadFromRedditArchive(submissionsData)
+//        
+//        let transaction = try Transaction.begin(.write, in: env)
+//        
+//        let documentEncoder = ContextFreeEncoder<Double>(source: .glove6B50d)
+//        
+//        let corpus = try DurableHNSWCorpus<String,ContextFreeEncoder>(
+//            encoder: documentEncoder,
+//            namespace: "subreddit_durable",
+//            in: transaction
+//        )
+//
+//        /// Add documents to corpus
+//        for submission in submissions {
+//            if let text = submission.selftext {
+//                try corpus.addUntokenizedDocument(text, in: transaction)
+//            }
+//        }
+//
+//        /// Save dictionary to disk
+//        corpus.saveDictionaryToDownloads(fileName: "dictionary.mmap")
+//        
+//        try transaction.commit()
+//    }
+//    
+//    func testQueryGuelphSubredditCorpus() async throws {
+//        let documentEncoder = ContextFreeEncoder<Double>(source: .glove6B50d)
+//        
+//        /// Setting up the environment
+//        let env = try Environment()
+//        try env.setMapSize(ONE_GB)
+//        try env.setMaxReaders(DEFAULT_MAXREADERS)
+//        try env.setMaxDBs(DEFAULT_MAXDBS)
+//        try env.open(path: workingDirectoryPath)
+//        
+//        /// Reading the memory map (and dictionary) from disk
+//        let transaction = try Transaction.begin(.read, in: env)
+//        
+//        let corpus = try DurableHNSWCorpus<String,ContextFreeEncoder>(
+//            encoder: documentEncoder,
+//            namespace: "subreddit_durable",
+//            in: transaction
+//        )
+//        
+//        corpus.dictionary = DurableHNSWCorpus.readDictionaryFromDownloads(fileName: "dictionary.mmap")
+//        
+//        let query = "I love waterloo and I love the geese."
+//        let queryVector: [Double] = documentEncoder.encodeToken(query).map { Double($0) }
+//        
+//        let results = try corpus.index.find(near: queryVector, limit: 8, in: transaction)
+//        
+//        for result in results {
+//            let key = Int(result.id.foreignKey)!
+//            print(corpus.getUntokenizedDocument(at: key))
+//        }
+//    }
+//}
+//#endif
+//
diff --git a/Tests/SwiftNLPTests/AllMiniLM_sampleTest.swift b/Tests/SwiftNLPTests/AllMiniLM_sampleTest.swift
index 697a75079d5d52d6dfee48d6caa2936d1a93ba19..e1f236ea9e4ddd6455cbb5effca8615d73ba5bbf 100644
--- a/Tests/SwiftNLPTests/AllMiniLM_sampleTest.swift
+++ b/Tests/SwiftNLPTests/AllMiniLM_sampleTest.swift
@@ -1,68 +1,75 @@
-//#if canImport(CoreML)
-//import XCTest
-//@testable import SwiftNLP
-//@testable import SwiftAnnoy
-//
-//final class BERT_test: XCTestCase {
-//    
-//    
-//    func testBasicExample() async throws {
-//        
-//        let docs = [
-//            "The quick brown fox jumps over the lazy dog",
-//            "I enjoy taking long walks along the beach at sunset",
-//            "Advances in neural networks have enabled new AI capabilities",
-//            "The stock market experienced a significant downturn last week",
-//            "Cooking a good meal can be both an art and a science",
-//            "The exploration of space is both challenging and rewarding",
-//            "Machine learning models are becoming increasingly sophisticated",
-//            "I love reading about history and ancient civilizations"
-//        ]
-//        
-//        let query = [
-//            "I like to read about new technology and artificial intelligence"
-//        ]
-//        //        let docs = ["cat dog", "bee fly"]
-//        
-//        var database_embedding: [[Float]] = []
-//        var query_embedding: [Float] = []
-//        var embedding_dim: Int = 384
-//        
-//        var model = MiniLMEmbeddings()
-//        
-//        query_embedding = await model.encode(sentence: query[0])!
-//        
-//        var i = 1
-//        //append sentence embedding to database_embedding
-//        for string in docs {
-//            if let vector = await model.encode(sentence: string) {
-//                database_embedding.append(vector)
-//                //print(i)
-//                i += 1
-//            } else {
-//                fatalError("Error occurred1")
-//            }
-//            
-//        }
-//        
-//        let index = AnnoyIndex<Float>(itemLength: embedding_dim, metric: .euclidean)
-//        
-//        try? index.addItems(items: &database_embedding)
-//        try? index.build(numTrees: 50)
-//        
-//        let results = index.getNNsForVector(vector: &query_embedding, neighbors: 8)
-//        
-////        if let finalresult = results {
-////            let extractedIndeices = finalresult.indices
-////            for index in extractedIndeices {
-////                if index < docs.count {
-////                    print(docs[index])
-////                } else {
-////                    print("Index \(index) out of range.")
-////                }
-////            }
-////        }
-//        print(results)
-//    }
-//}
-//#endif
+#if canImport(CoreML)
+import XCTest
+
+import Darwin
+
+@testable import SwiftNLP
+@testable import SwiftAnnoy
+
+final class BERT_test: XCTestCase {
+   
+   
+    func testBasicExample() async throws {
+        setbuf(stdout, nil)
+        setbuf(stderr, nil)
+        print("running test")
+       
+        let docs = [
+            "The quick brown fox jumps over the lazy dog",
+            "I enjoy taking long walks along the beach at sunset",
+            "Advances in neural networks have enabled new AI capabilities",
+            "The stock market experienced a significant downturn last week",
+            "Cooking a good meal can be both an art and a science",
+            "The exploration of space is both challenging and rewarding",
+            "Machine learning models are becoming increasingly sophisticated",
+            "I love reading about history and ancient civilizations"
+        ]
+        
+        let query = [
+            "I like to read about new technology and artificial intelligence"
+        ]
+        
+        for model in ["all_MiniLM_L6_v2"] {
+            var database_embedding: [[Float]] = []
+            var query_embedding: [Float] = []
+            let embedding_dim: Int = 384
+           
+            var model = LLMEmbeddings(model_type: model)
+           
+            query_embedding = await model.encode(sentence: query[0])!
+           
+            var i = 1
+
+            for string in docs {
+                if let vector = await model.encode(sentence: string) {
+                    database_embedding.append(vector)
+                    i += 1
+                } else {
+                    fatalError("Error occurred!")
+                }
+            }
+            
+            let index = AnnoyIndex<Float>(itemLength: embedding_dim, metric: .euclidean)
+           
+            try? index.addItems(items: &database_embedding)
+            try? index.build(numTrees: 50)
+           
+            let results = index.getNNsForVector(vector: &query_embedding, neighbors: 8)
+           
+            if let finalresult = results {
+                let extractedIndices = finalresult.indices
+                for index in extractedIndices {
+                    if index < docs.count {
+                        print(docs[index])
+                    } else {
+                        print("Index \(index) out of range.")
+                    }
+                }
+            }
+            
+            print(results)
+            print(database_embedding)
+        }
+    }
+}
+#endif
diff --git a/adding_new_model.md b/adding_new_model.md
new file mode 100644
index 0000000000000000000000000000000000000000..6158178750a9a33c6541b8e5d40446a8c7942d40
--- /dev/null
+++ b/adding_new_model.md
@@ -0,0 +1,33 @@
+CoreML models can be registered and used automatically with the CoreMLEncoder. To add a new model:
+
+1. Add the .mlpackage file to ```Sources/SwiftNLP/Resources```.
+
+2. From the ```swiftnlp``` directory, run the following from the command line:
+    - ```xcrun coremlcompiler generate Sources/SwiftNLP/Resources/[name of mlpackage file]/ --language Swift Sources/SwiftNLPGenericLLMMacros```
+    
+    This should generate a ```.swift``` file containing the model, input, and output classes referenced in step 4.
+
+3. Add the following script steps to the ```.compile_models``` item in ```.gitlab-ci.yml```:
+    - ```xcrun coremlcompiler compile Sources/SwiftNLP/Resources/[name of mlpackage file]/ Sources/SwiftNLP/Models```
+    - ```xcrun coremlcompiler generate Sources/SwiftNLP/Resources/[name of mlpackage file]/ --language Swift Sources/SwiftNLP/Resources```
+    - ```mv Sources/SwiftNLP/Resources/[name of the generated swift file] Sources/SwiftNLP/2.\ Encoding```
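+    - For example, for a hypothetical ```MyNewModel.mlpackage``` (the package name and the generated Swift file name below are placeholders; substitute your own), the added lines would look like:
+        ```
+        - xcrun coremlcompiler compile Sources/SwiftNLP/Resources/MyNewModel.mlpackage/ Sources/SwiftNLP/Models
+        - xcrun coremlcompiler generate Sources/SwiftNLP/Resources/MyNewModel.mlpackage/ --language Swift Sources/SwiftNLP/Resources
+        - mv Sources/SwiftNLP/Resources/MyNewModel.swift Sources/SwiftNLP/2.\ Encoding
+        ```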
+
+4. Navigate to ```Sources/SwiftNLPGenericLLMMacros/ModelClasses.swift```.
+    - Add the following to the `LLM_MODEL_CLASSES` map:
+        ```
+        "[string name of model]": [
+            LLMModelClassesKey.Input: [name of model input class].self,
+            LLMModelClassesKey.Output: [name of model output class].self,
+            LLMModelClassesKey.Model: [name of model class].self,
+            LLMModelClassesKey.FeatureName: [name of feature to use],
+            LLMModelClassesKey.URL: "[name of ml package file without .mlpackage].mlmodelc",
+            LLMModelClassesKey.InputDimension: [input size]
+        ]
+        ```
+    - Notes on filling in the entry:
+        - [string name of model] can be any string you want; it is the identifier you will use to request the model later (see step 5).
+        - The [name of model input class], [name of model output class], and [name of model class] are the names of the classes declared in the generated Swift file.
+        - The name of the feature to use appears in the model output class. Typically it is `embeddings`, though some models expose other fields such as `pooler_output`.
+        - The [input size] can be read from the auto-generated documentation in the model input class.
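+    - For illustration only, assume a hypothetical ```MyNewModel.mlpackage``` whose generated Swift file declares the classes `MyNewModelInput`, `MyNewModelOutput`, and `MyNewModel`, whose output feature is `embeddings`, and whose input size is 512 (every one of these values is a placeholder; read the real ones from your generated file). The entry might then look like:
+        ```
+        "my_new_model": [
+            LLMModelClassesKey.Input: MyNewModelInput.self,
+            LLMModelClassesKey.Output: MyNewModelOutput.self,
+            LLMModelClassesKey.Model: MyNewModel.self,
+            LLMModelClassesKey.FeatureName: "embeddings",
+            LLMModelClassesKey.URL: "MyNewModel.mlmodelc",
+            LLMModelClassesKey.InputDimension: 512
+        ]
+        ```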
+
+5. Build the project, and the model can now be used anywhere. To access the model, instantiate an `LLMEmbeddings` object with the model's string identifier. If using the CoreMLEncoder, set its `model` field to the same identifier.
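+    - As a minimal usage sketch: the identifier ```all_MiniLM_L6_v2``` and the `LLMEmbeddings` API shape (`model_type:` initializer, async `encode(sentence:)` returning `[Float]?`) follow ```Tests/SwiftNLPTests/AllMiniLM_sampleTest.swift```; it assumes `LLMEmbeddings` is visible to your target (the test uses `@testable import`) and must be called from an async context.
+        ```
+        import SwiftNLP
+
+        // Look up the registered model by its string identifier and encode one sentence.
+        let embedder = LLMEmbeddings(model_type: "all_MiniLM_L6_v2")
+        if let vector = await embedder.encode(sentence: "I like to read about new technology") {
+            print("Embedding has \(vector.count) dimensions") // 384 for all_MiniLM_L6_v2
+        }
+        ```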
\ No newline at end of file