|
29 | 29 |
|
30 | 30 |
|
class LPGen:
    def __init__(self, kb_path=None, kb=None, storage_path=None, max_num_lps=1000, beyond_alc=False, depth=3,
                 max_child_length=20, refinement_expressivity=0.2, downsample_refinements=True,
                 sample_fillers_count=10, num_sub_roots=50, min_num_pos_examples=1,
                 max_pos_neg_examples_per_lp=None):
        """
        Generate learning problems from a knowledge base and store them on disk.

        Args:
            kb_path: path to the OWL file representing the knowledge base/ontology.
            kb: an instance of the KnowledgeBase class; can be used instead of kb_path.
            storage_path: directory in which to store the generated data. Note that the
                directory need not exist; it will be created automatically.
            max_num_lps: the maximum number of learning problems to store.
            beyond_alc: whether to generate learning problems in ALCHIQD.
            depth, max_child_length, refinement_expressivity, sample_fillers_count, num_sub_roots:
                all refer to the size of the data (learning problems) to be generated.
            downsample_refinements: whether to downsample refinements in ExpressRefinement.
                If refinement_expressivity < 1, this must be set to True.
            min_num_pos_examples: minimum number of positive examples required per
                learning problem.  # presumed from the name — confirm against KB2Data
            max_pos_neg_examples_per_lp: upper bound on the number of positive/negative
                examples stored per learning problem; None means unbounded.
                # NOTE(review): semantics inferred from the name — verify against KB2Data
        """
        # All generation work is delegated to KB2Data; this class is a thin facade.
        self.lp_gen = KB2Data(path=kb_path, knowledge_base=kb, storage_path=storage_path,
                              max_num_lps=max_num_lps, beyond_alc=beyond_alc, depth=depth,
                              max_child_length=max_child_length,
                              refinement_expressivity=refinement_expressivity,
                              downsample_refinements=downsample_refinements,
                              sample_fillers_count=sample_fillers_count,
                              num_sub_roots=num_sub_roots,
                              min_num_pos_examples=min_num_pos_examples,
                              max_pos_neg_examples_per_lp=max_pos_neg_examples_per_lp)

    def generate(self):
        """Generate the learning problem descriptions and save them under storage_path."""
        self.lp_gen.generate_descriptions().save_data()
0 commit comments