sparknlp_jsl.eval#
Module Contents#
Classes#
| NerCrfEvaluation | Wrapper class for a Java companion object |
| NerDLEvaluation | Wrapper class for a Java companion object |
| NerDLMetrics | Wrapper class for a Java companion object |
| NorvigSpellEvaluation | Wrapper class for a Java companion object |
| POSEvaluation | Wrapper class for a Java companion object |
| SymSpellEvaluation | Wrapper class for a Java companion object |
- class NerCrfEvaluation(spark, test_file, tag_level='')#
Bases:
sparknlp.internal.ExtendedJavaWrapper
Wrapper class for a Java companion object
- java_obj#
- sc = None#
- apply()#
- computeAccuracyAnnotator(train_file, ner, embeddings)#
- computeAccuracyModel(ner)#
- getDataFrame(spark, jdf)#
- new_java_array(pylist, java_class)#
ToDo: Inspired by Spark 2.0. Review if Spark changes.
- new_java_array_integer(pylist)#
- new_java_array_string(pylist)#
- new_java_obj(java_class, *args)#
- spark_version()#
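A minimal usage sketch for NerCrfEvaluation, assuming an active SparkSession named spark, CoNLL-formatted train/test files (paths are illustrative), and the public glove_100d pretrained embeddings; computeAccuracyAnnotator fits the given annotator on the train file and scores it on the test file:

    from sparknlp.annotator import NerCrfApproach, WordEmbeddingsModel
    from sparknlp_jsl.eval import NerCrfEvaluation

    # CoNLL-formatted test set; tag_level is left at its default (illustrative path)
    ner_crf_eval = NerCrfEvaluation(spark, "eng.testa")

    # Untrained annotator: trained on the train file, then evaluated on the test set
    embeddings = WordEmbeddingsModel.pretrained("glove_100d")
    ner_crf = NerCrfApproach() \
        .setInputCols(["sentence", "token", "pos", "embeddings"]) \
        .setOutputCol("ner") \
        .setLabelColumn("label")
    ner_crf_eval.computeAccuracyAnnotator("eng.train", ner_crf, embeddings)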
- class NerDLEvaluation(spark, test_file, tag_level='')#
Bases:
sparknlp.internal.ExtendedJavaWrapper
Wrapper class for a Java companion object
- java_obj#
- sc = None#
- apply()#
- computeAccuracyAnnotator(train_file, ner, embeddings)#
- computeAccuracyModel(ner)#
- getDataFrame(spark, jdf)#
- new_java_array(pylist, java_class)#
ToDo: Inspired by Spark 2.0. Review if Spark changes.
- new_java_array_integer(pylist)#
- new_java_array_string(pylist)#
- new_java_obj(java_class, *args)#
- spark_version()#
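A minimal sketch for NerDLEvaluation, assuming an active SparkSession named spark, a CoNLL-formatted test file (illustrative path), and the public ner_dl pretrained model; computeAccuracyModel evaluates an already trained model directly on the test file:

    from sparknlp.annotator import NerDLModel
    from sparknlp_jsl.eval import NerDLEvaluation

    ner_dl_eval = NerDLEvaluation(spark, "eng.testa")

    # Evaluate a pretrained (or previously trained) NerDLModel on the test set
    ner_model = NerDLModel.pretrained("ner_dl")
    ner_dl_eval.computeAccuracyModel(ner_model)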
- class NerDLMetrics(mode='full_chunk')#
Bases:
sparknlp.internal.ExtendedJavaWrapper
Wrapper class for a Java companion object
- java_obj#
- sc = None#
- apply()#
- computeMetricsFromDF(df, prediction_col='ner', label_col='label', drop_o=True, case_sensitive=True)#
- getDataFrame(spark, jdf)#
- new_java_array(pylist, java_class)#
ToDo: Inspired by Spark 2.0. Review if Spark changes.
- new_java_array_integer(pylist)#
- new_java_array_string(pylist)#
- new_java_obj(java_class, *args)#
- spark_version()#
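A sketch of NerDLMetrics over a predictions DataFrame; predictions is assumed to be the output of a NER pipeline that contains both the predicted "ner" column and the gold "label" column, and the return value is assumed to be a Spark DataFrame of chunk-level metrics:

    from sparknlp_jsl.eval import NerDLMetrics

    # mode="full_chunk" is the documented default (evaluation at full-chunk level)
    metrics = NerDLMetrics(mode="full_chunk")
    metrics_df = metrics.computeMetricsFromDF(
        predictions,
        prediction_col="ner",
        label_col="label",
        drop_o=True,          # ignore the O (outside) tag
        case_sensitive=True,
    )
    metrics_df.show(truncate=False)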
- class NorvigSpellEvaluation(spark, test_file, ground_truth_file)#
Bases:
sparknlp.internal.ExtendedJavaWrapper
Wrapper class for a Java companion object
- java_obj#
- sc = None#
- apply()#
- computeAccuracyAnnotator(train_file, spell)#
- computeAccuracyModel(spell)#
- getDataFrame(spark, jdf)#
- new_java_array(pylist, java_class)#
ToDo: Inspired by Spark 2.0. Review if Spark changes.
- new_java_array_integer(pylist)#
- new_java_array_string(pylist)#
- new_java_obj(java_class, *args)#
- spark_version()#
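A minimal sketch for NorvigSpellEvaluation, assuming an active SparkSession named spark, a test file of misspelled text, and a ground-truth file with the corrected text (paths and the pretrained checker are illustrative):

    from sparknlp.annotator import NorvigSweetingModel
    from sparknlp_jsl.eval import NorvigSpellEvaluation

    spell_eval = NorvigSpellEvaluation(spark, "misspelled.txt", "ground_truth.txt")

    # Evaluate a pretrained Norvig spell checker against the ground truth
    spell_eval.computeAccuracyModel(NorvigSweetingModel.pretrained())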
- class POSEvaluation(spark, test_file)#
Bases:
sparknlp.internal.ExtendedJavaWrapper
Wrapper class for a Java companion object
- java_obj#
- sc = None#
- apply()#
- computeAccuracyAnnotator(train_file, pos)#
- computeAccuracyModel(pos)#
- getDataFrame(spark, jdf)#
- new_java_array(pylist, java_class)#
ToDo: Inspired by Spark 2.0. Review if Spark changes.
- new_java_array_integer(pylist)#
- new_java_array_string(pylist)#
- new_java_obj(java_class, *args)#
- spark_version()#
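A minimal sketch for POSEvaluation, assuming an active SparkSession named spark, a tagged test file in word_tag format (illustrative path), and the public pos_anc pretrained tagger:

    from sparknlp.annotator import PerceptronModel
    from sparknlp_jsl.eval import POSEvaluation

    # Test file in word_tag format, e.g. "The_DT cat_NN sat_VBD" (illustrative path)
    pos_eval = POSEvaluation(spark, "test.pos.txt")
    pos_eval.computeAccuracyModel(PerceptronModel.pretrained("pos_anc"))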
- class SymSpellEvaluation(spark, test_file, ground_truth_file)#
Bases:
sparknlp.internal.ExtendedJavaWrapper
Wrapper class for a Java companion object
- java_obj#
- sc = None#
- apply()#
- computeAccuracyAnnotator(train_file, spell)#
- computeAccuracyModel(spell)#
- getDataFrame(spark, jdf)#
- new_java_array(pylist, java_class)#
ToDo: Inspired by Spark 2.0. Review if Spark changes.
- new_java_array_integer(pylist)#
- new_java_array_string(pylist)#
- new_java_obj(java_class, *args)#
- spark_version()#
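A minimal sketch for SymSpellEvaluation, assuming an active SparkSession named spark; here computeAccuracyAnnotator trains a SymmetricDelete checker on a corpus file before scoring it against the ground truth (all paths are illustrative):

    from sparknlp.annotator import SymmetricDeleteApproach
    from sparknlp_jsl.eval import SymSpellEvaluation

    sym_eval = SymSpellEvaluation(spark, "misspelled.txt", "ground_truth.txt")

    # Untrained annotator: trained on the corpus file, then evaluated on the test set
    sym_spell = SymmetricDeleteApproach() \
        .setInputCols(["token"]) \
        .setOutputCol("spell")
    sym_eval.computeAccuracyAnnotator("corpus.txt", sym_spell)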