class TensorflowBioGPT extends MedicalEncoderDecoderModel
Linear Supertypes
- MedicalEncoderDecoderModel
- AnyRef
- Any
Instance Constructors
- new TensorflowBioGPT(tensorflow: TensorflowWrapper, bpeTokenizer: BioGPTTokenizer, configProtoBytes: Option[Array[Byte]] = None, numLayers: Integer = 24, numAttentionHeads: Integer = 16)
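A rough construction sketch, based only on the constructor signature above. The values `tfWrapper: TensorflowWrapper` and `tokenizer: BioGPTTokenizer` are assumed to have been loaded elsewhere (e.g. by the annotator's model-loading code) and are not defined on this page.

```scala
// Sketch only: tfWrapper and tokenizer are assumed to exist already.
val bioGpt = new TensorflowBioGPT(
  tensorflow = tfWrapper,   // TensorflowWrapper holding the exported graph
  bpeTokenizer = tokenizer, // BioGPTTokenizer used for BPE encoding/decoding
  configProtoBytes = None,  // optional TensorFlow session configuration
  numLayers = 24,           // defaults documented above
  numAttentionHeads = 16
)
```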
Value Members
- final def !=(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def ##(): Int
  - Definition Classes: AnyRef → Any
- final def ==(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def asInstanceOf[T0]: T0
  - Definition Classes: Any
- val bpeTokenizer: BioGPTTokenizer
- def clone(): AnyRef
  - Attributes: protected[lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( ... ) @native()
- def decode(sentences: Array[Array[Int]]): Seq[String]
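A hedged usage sketch for decode: it maps batches of generated token ids back to strings through the BPE tokenizer. The ids below are placeholders for illustration, not real BioGPT vocabulary entries, and `bioGpt` is the instance constructed above.

```scala
// Placeholder ids; real calls would pass ids produced by generation.
val texts: Seq[String] = bioGpt.decode(Array(Array(42, 7, 13), Array(5, 99)))
```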
- def encode(prompts: Seq[Annotation], isCaseSensitive: Boolean): Seq[Array[Int]]
  - Attributes: protected
- def encodeQA(questions: Seq[Annotation], contexts: Seq[Annotation], questionPrompt: String, isCaseSensitive: Boolean): Seq[Array[Int]]
  - Attributes: protected
- final def eq(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- def equals(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- def finalize(): Unit
  - Attributes: protected[lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( classOf[java.lang.Throwable] )
- def generate(prompts: Seq[Annotation], batchSize: Int, maxNewTokens: Int, maxContextLength: Int, doSample: Boolean, topK: Int, randomSeed: Option[Int] = None, ignoreTokenIds: Array[Int] = Array(), isCaseSensitive: Boolean, stopAtEos: Boolean, noRepeatNgramSize: Int): Seq[Annotation]
  - Definition Classes: TensorflowBioGPT → MedicalEncoderDecoderModel
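A hedged sketch of a generate call using only the parameters documented above. `bioGpt` and `prompts: Seq[Annotation]` (document-type prompt annotations) are assumed to exist; the concrete values are illustrative, not recommended settings.

```scala
val generated: Seq[Annotation] = bioGpt.generate(
  prompts = prompts,
  batchSize = 2,
  maxNewTokens = 128,
  maxContextLength = 512,
  doSample = false,        // greedy decoding when false
  topK = 1,
  randomSeed = None,
  ignoreTokenIds = Array(),
  isCaseSensitive = true,
  stopAtEos = true,        // stop when the end-of-sequence token is produced
  noRepeatNgramSize = 2
)
```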
- def generateNoBeamSearch(inputIds: Seq[Array[Int]], maxContextLength: Int, maxNewTokens: Int, doSample: Boolean, topK: Int, vocabSize: Int, randomSeed: Option[Int], session: Session, ignoreTokenIds: Array[Int], questionAnswerTerminals: Array[Int], skipLastToken: Boolean, useCache: Boolean, returnContext: Boolean, stopAtEos: Boolean, noRepeatNgramSize: Int = 2): (Array[Array[Int]], Array[Float])
- final def getClass(): Class[_]
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- def getGeneratedNgrams(prevInputIds: Seq[Array[Int]], generatedNgrams: Array[Map[IndexedSeq[Int], List[Int]]], hypoIdx: Int, curLen: Int, noRepeatNgramSize: Int): Array[Int]
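An illustrative sketch (not the original implementation) of the no-repeat-ngram idea behind getGeneratedNgrams: any token that would complete an n-gram already present in the generated sequence is banned for the next decoding step. The helper name and simplified signature below are assumptions for illustration only.

```scala
// Hypothetical helper illustrating the banned-token lookup for one hypothesis.
def bannedNextTokens(generated: Array[Int], noRepeatNgramSize: Int): Array[Int] = {
  if (noRepeatNgramSize <= 0 || generated.length < noRepeatNgramSize) return Array()
  // Record every (n-1)-token prefix seen so far together with the token that followed it.
  val seen = generated
    .sliding(noRepeatNgramSize)
    .map(ngram => (ngram.dropRight(1).toSeq, ngram.last))
    .toSeq
    .groupBy(_._1)
    .mapValues(_.map(_._2))
  // The current (n-1)-token suffix determines which next tokens would repeat an n-gram.
  val currentPrefix = generated.takeRight(noRepeatNgramSize - 1).toSeq
  seen.getOrElse(currentPrefix, Seq()).toArray
}
```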
- def hashCode(): Int
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- final def isInstanceOf[T0]: Boolean
  - Definition Classes: Any
- final def ne(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- final def notify(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- final def notifyAll(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- def predict(sentences: Seq[Annotation], task: String, batchSize: Int, maxNewTokens: Int, maxTextLength: Int, doSample: Boolean, topK: Int, randomSeed: Option[Int] = None, ignoreTokenIds: Array[Int] = Array(), isCaseSensitive: Boolean, stopAtEos: Boolean, noRepeatNgramSize: Int, refineSummary: Boolean = false, refineSummaryTargetLength: Int = 100, refineChunkSize: Int = 512, refineMaxAttempts: Int = 3): Seq[Annotation]
  - Definition Classes: MedicalEncoderDecoderModel
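A hedged sketch of a predict call, which prepends a task string to the input and can optionally refine over-long summaries (the refine* parameters keep their defaults here). `bioGpt` and `sentences: Seq[Annotation]` are assumed to exist, and the task string is an assumption for illustration.

```scala
val outputs: Seq[Annotation] = bioGpt.predict(
  sentences = sentences,
  task = "summarize",      // illustrative task string, not documented on this page
  batchSize = 1,
  maxNewTokens = 200,
  maxTextLength = 1024,
  doSample = false,
  topK = 1,
  isCaseSensitive = true,
  stopAtEos = true,
  noRepeatNgramSize = 2
)
```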
- def predictQuestions(questionAndContexts: Seq[(Annotation, Annotation)], batchSize: Int, maxNewTokens: Int, maxContextLength: Int, doSample: Boolean, topK: Int, questionPrompt: String, randomSeed: Option[Int] = None, ignoreTokenIds: Array[Int] = Array(), isCaseSensitive: Boolean, questionAnswerTerminals: Array[Int], skipLastToken: Boolean, useCache: Boolean, noRepeatNgramSize: Int): Seq[Annotation]
  - Definition Classes: TensorflowBioGPT → MedicalEncoderDecoderModel
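A hedged question-answering sketch: predictQuestions pairs each question annotation with its context annotation and decodes an answer per pair. `bioGpt`, `questions` and `contexts` (both `Seq[Annotation]`) are assumed to exist, and the prompt text is an assumption for illustration.

```scala
val answers: Seq[Annotation] = bioGpt.predictQuestions(
  questionAndContexts = questions.zip(contexts),
  batchSize = 1,
  maxNewTokens = 64,
  maxContextLength = 512,
  doSample = false,
  topK = 1,
  questionPrompt = "question:",   // illustrative prompt, not documented on this page
  isCaseSensitive = true,
  questionAnswerTerminals = Array(),
  skipLastToken = false,
  useCache = true,
  noRepeatNgramSize = 2
)
```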
- def sessionWarmup(useCache: Boolean): Unit
  - Attributes: protected
- def sessionWarmup(): Unit
  - Attributes: protected
  - Definition Classes: TensorflowBioGPT → MedicalEncoderDecoderModel
- def softmax(scores: Array[Float]): Array[Float]
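A minimal sketch of the softmax transform this method exposes, turning raw logits into a probability distribution; the max-subtraction is a standard numerical-stability trick and the actual implementation may differ in detail.

```scala
// Numerically stabilised softmax over a logit vector.
def softmaxSketch(scores: Array[Float]): Array[Float] = {
  val maxScore = scores.max
  val exps = scores.map(s => math.exp((s - maxScore).toDouble))
  val total = exps.sum
  exps.map(e => (e / total).toFloat)
}
```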
- final def synchronized[T0](arg0: ⇒ T0): T0
  - Definition Classes: AnyRef
- def tag(batch: Seq[Array[Int]], maxNewTokens: Int, maxContextLength: Int, doSample: Boolean, topK: Int, randomSeed: Option[Int], ignoreTokenIds: Array[Int], questionAnswerTerminals: Array[Int], skipLastToken: Boolean, useCache: Boolean, returnContext: Boolean, stopAtEos: Boolean, noRepeatNgramSize: Int): (Array[Array[Int]], Array[Float])
- val tensorflow: TensorflowWrapper
- def toString(): String
  - Definition Classes: AnyRef → Any
- final def wait(): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long, arg1: Int): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... ) @native()