
com.johnsnowlabs.ml.tensorflow

TensorflowAlbertClassification

class TensorflowAlbertClassification extends Serializable with TensorflowForClassification

Linear Supertypes
TensorflowForClassification, Serializable, Serializable, AnyRef, Any

Instance Constructors

  1. new TensorflowAlbertClassification(tensorflowWrapper: TensorflowWrapper, spp: SentencePieceWrapper, configProtoBytes: Option[Array[Byte]] = None, tags: Map[String, Int], signatures: Option[Map[String, String]] = None)

    tensorflowWrapper

    ALBERT model wrapper with TensorflowWrapper

    spp

    ALBERT SentencePiece model with SentencePieceWrapper

    configProtoBytes

    Configuration for TensorFlow session

    tags

    labels the model was trained with, in order

    signatures

    TF v2 signatures in Spark NLP
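
    A minimal construction sketch, assuming a TensorflowWrapper and a
    SentencePieceWrapper for an ALBERT model have already been loaded
    elsewhere (their loading is not shown) and assuming the import path
    com.johnsnowlabs.ml.tensorflow.sentencepiece for SentencePieceWrapper;
    the tags map is purely illustrative:

      import com.johnsnowlabs.ml.tensorflow.{TensorflowAlbertClassification, TensorflowWrapper}
      import com.johnsnowlabs.ml.tensorflow.sentencepiece.SentencePieceWrapper

      // Hypothetical helper: builds the classifier from already loaded model artifacts
      def buildAlbertClassifier(
          tfWrapper: TensorflowWrapper,
          spp: SentencePieceWrapper): TensorflowAlbertClassification = {
        // Labels the model was trained with, in order (illustrative values)
        val tags = Map("B-PER" -> 0, "I-PER" -> 1, "O" -> 2)

        new TensorflowAlbertClassification(
          tensorflowWrapper = tfWrapper,
          spp = spp,
          configProtoBytes = None, // default TensorFlow session configuration
          tags = tags,
          signatures = None        // fall back to the default TF v2 signatures
        )
      }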

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. val _tfAlbertSignatures: Map[String, String]
  5. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  6. def calculateSigmoid(scores: Array[Float]): Array[Float]

    Calculate sigmoid from returned logits

    scores

    logits output from output layer

    Definition Classes
    TensorflowForClassification
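
    An illustrative Scala sketch of the element-wise sigmoid this method computes
    over the raw logits (the library's own implementation may differ in detail):

      // sigmoid(x) = 1 / (1 + e^(-x)), applied to each logit independently
      def sigmoidSketch(scores: Array[Float]): Array[Float] =
        scores.map(s => (1.0 / (1.0 + math.exp(-s))).toFloat)
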
  7. def calculateSoftmax(scores: Array[Float]): Array[Float]

    Calculate softmax from returned logits

    scores

    logits output from output layer

    Definition Classes
    TensorflowForClassification
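
    An illustrative Scala sketch of a numerically stable softmax over one row of
    logits (the library's own implementation may differ in detail):

      // softmax(x_i) = e^(x_i - max(x)) / sum_j e^(x_j - max(x))
      def softmaxSketch(scores: Array[Float]): Array[Float] = {
        val maxScore = scores.max
        val exps = scores.map(s => math.exp(s - maxScore))
        val sum = exps.sum
        exps.map(e => (e / sum).toFloat)
      }
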
  8. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  9. def constructAnnotationForSequenceClassifier(sentence: Sentence, label: String, meta: Array[(String, String)]): Annotation
    Definition Classes
    TensorflowForClassification
  10. def constructMetaForSequenceClassifier(tags: Map[String, Int], scores: Array[Float]): Array[(String, String)]
    Definition Classes
    TensorflowForClassification
  11. def encode(sentences: Seq[(WordpieceTokenizedSentence, Int)], maxSequenceLength: Int): Seq[Array[Int]]

    Encode the input sequence into index IDs, adding padding where necessary

    Definition Classes
    TensorflowForClassification
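
    A sketch of the padding scheme implied by the signature: each sentence's
    wordpiece IDs are wrapped in start and end tokens, truncated if needed, and
    right-padded with the pad-token ID up to maxSequenceLength. The token-ID
    parameters here are assumptions for illustration only:

      def encodeSketch(
          tokenIds: Seq[Array[Int]],
          maxSequenceLength: Int,
          startTokenId: Int,
          endTokenId: Int,
          padTokenId: Int): Seq[Array[Int]] =
        tokenIds.map { ids =>
          // Reserve two positions for the start and end tokens
          val truncated = ids.take(maxSequenceLength - 2)
          val withSpecials = (startTokenId +: truncated) :+ endTokenId
          // Right-pad with the pad-token ID up to the fixed sequence length
          withSpecials ++ Array.fill(maxSequenceLength - withSpecials.length)(padTokenId)
        }
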
  12. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  13. def equals(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  14. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  15. def findIndexedToken(tokenizedSentences: Seq[TokenizedSentence], sentence: (WordpieceTokenizedSentence, Int), tokenPiece: TokenPiece): Option[IndexedToken]
  16. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  17. def hashCode(): Int
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  18. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  19. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  20. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  21. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  22. def predict(tokenizedSentences: Seq[TokenizedSentence], batchSize: Int, maxSentenceLength: Int, caseSensitive: Boolean, tags: Map[String, Int]): Seq[Annotation]
    Definition Classes
    TensorflowForClassification
  23. def predictSequence(tokenizedSentences: Seq[TokenizedSentence], sentences: Seq[Sentence], batchSize: Int, maxSentenceLength: Int, caseSensitive: Boolean, coalesceSentences: Boolean = false, tags: Map[String, Int], activation: String = ActivationFunction.softmax): Seq[Annotation]
    Definition Classes
    TensorflowForClassification
  24. def scoresToLabelForSequenceClassifier(tags: Map[String, Int], scores: Array[Float]): String
    Definition Classes
    TensorflowForClassification
  25. val sentenceEndTokenId: Int
    Attributes
    protected
    Definition Classes
    TensorflowAlbertClassification → TensorflowForClassification
  26. val sentencePadTokenId: Int
    Attributes
    protected
    Definition Classes
    TensorflowAlbertClassification → TensorflowForClassification
  27. val sentenceStartTokenId: Int
    Attributes
    protected
    Definition Classes
    TensorflowAlbertClassification → TensorflowForClassification
  28. val spp: SentencePieceWrapper
  29. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  30. def tag(batch: Seq[Array[Int]]): Seq[Array[Array[Float]]]
  31. def tagSequence(batch: Seq[Array[Int]], activation: String): Array[Array[Float]]
  32. val tensorflowWrapper: TensorflowWrapper
  33. def toString(): String
    Definition Classes
    AnyRef → Any
  34. def tokenizeWithAlignment(sentences: Seq[TokenizedSentence], maxSeqLength: Int, caseSensitive: Boolean): Seq[WordpieceTokenizedSentence]
  35. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  36. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  37. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  38. def wordAndSpanLevelAlignmentWithTokenizer(tokenLogits: Array[Array[Float]], tokenizedSentences: Seq[TokenizedSentence], sentence: (WordpieceTokenizedSentence, Int), tags: Map[String, Int]): Seq[Annotation]

    Word-level and span-level alignment with Tokenizer
    https://github.com/google-research/bert#tokenization

    ### Input
    orig_tokens = ["John", "Johanson", "'s", "house"]
    labels      = ["NNP", "NNP", "POS", "NN"]

    # bert_tokens == ["[CLS]", "john", "johan", "##son", "'", "s", "house", "[SEP]"]
    # orig_to_tok_map == [1, 2, 4, 6]

    Definition Classes
    TensorflowForClassification
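
    A sketch of the idea behind orig_to_tok_map: keep only the logit rows at the
    wordpiece positions that start each original token. The values reuse the
    example above and are illustrative only:

      // tokenLogits has one row per wordpiece (including [CLS] and [SEP]);
      // this selects the row of the first sub-token of every original word.
      val origToTokMap = Array(1, 2, 4, 6)
      def alignSketch(tokenLogits: Array[Array[Float]]): Array[Array[Float]] =
        origToTokMap.map(i => tokenLogits(i))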
