How to use the questionary.rawselect function in questionary

To help you get started, we’ve selected a few questionary.rawselect examples based on popular ways the library is used in public projects.
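
Before the project snippets, here is a minimal sketch of a rawselect prompt, mirroring the readme example further down (the prompt text and choices are illustrative): the call takes a message and a list of choices, and .ask() runs the prompt and returns the selected value.

import questionary

# rawselect numbers the choices and lets the user pick one by typing its
# number; .ask() blocks until a selection is made and returns it.
answer = questionary.rawselect(
    "What do you want to do?",
    choices=["Order a pizza", "Make a reservation", "Ask for opening hours"],
).ask()
print(answer)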


github abhinavkashyap / sciwing / sciwing / cli / parsect_interact.py (View on Github)
        Returns
        -------
        str
            The choice of experiment chosen by the user

        """
        output_dirpath = pathlib.Path(OUTPUT_DIR)
        experiment_dirnames = self.get_experiments_folder_names()

        if len(experiment_dirnames) == 0:
            self.msg_printer.fail(
                f"There are no experiments for the model type {self.model_type_answer}"
            )
            exit(1)

        experiment_choices = [Choice(dirname) for dirname in experiment_dirnames]
        exp_choice = questionary.rawselect(
            "Please select an experiment", choices=experiment_choices, qmark="❓"
        ).ask()

        exp_choice_path = pathlib.Path(OUTPUT_DIR, exp_choice)
        if not exp_choice_path.is_dir():
            with self.msg_printer.loading(
                f"Downloading experiment {exp_choice} from s3"
            ):
                self.s3util.download_folder(
                    exp_choice, download_only_best_checkpoint=True
                )
        return str(output_dirpath.joinpath(exp_choice_path))

github abhinavkashyap / sciwing / sciwing / cli / parsect_interact.py (View on Github)
        inference_func = self.model_type2inf_func[self.model_type_answer]
        inference_client = inference_func(exp_dir)
        inference_client.run_test()

        while True:
            choices = [
                Choice("See-Confusion-Matrix"),
                Choice("See-examples-of-Classifications"),
                Choice("See-prf-table"),
                Choice(title="Enter text ", value="enter_text"),
                Choice("exit"),
            ]
            if self.model_type_answer == "lstm-crf-scienceie-tagger":
                choices.append(Choice("official-results", "semeval_official_results"))

            interaction_choice = questionary.rawselect(
                "What would you like to do now", qmark="❓", choices=choices
            ).ask()
            if interaction_choice == "See-Confusion-Matrix":
                inference_client.print_confusion_matrix()
            elif interaction_choice == "See-examples-of-Classifications":
                misclassification_choice = questionary.text(
                    "Enter Two Classes separated by a space. [Hint: 1 2]"
                ).ask()
                two_classes = [
                    int(class_) for class_ in misclassification_choice.split()
                ]
                first_class, second_class = two_classes[0], two_classes[1]
                sentences = inference_client.get_misclassified_sentences(
                    first_class, second_class
                )
                self.msg_printer.divider(

github abhinavkashyap / sciwing / sciwing / cli / s3_mv_cli.py (View on Github)
def ask_deletion() -> str:
        """ Since this is deletion, we want confirmation, just to be sure
        whether to keep the deleted folder locally or to remove it

        Returns
        -------
        str
            A yes or no answer to the question

        """
        deletion_question = questionary.rawselect(
            "Do you also want to delete the file locally. Caution! File will be removed locally",
            qmark="❓",
            choices=[Choice("yes"), Choice("no")],
        )
        deletion_answer = deletion_question.ask()
        return deletion_answer
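
A hypothetical caller (not part of the snippet above) would simply branch on the "yes"/"no" string that ask_deletion returns; the local folder path and the shutil.rmtree cleanup below are illustrative only.

import shutil

# Hypothetical usage: remove the local copy only if the user answered "yes".
local_folder_path = "outputs/experiment_1"  # hypothetical path
answer = ask_deletion()
if answer == "yes":
    shutil.rmtree(local_folder_path)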

github abhinavkashyap / sciwing / sciwing / cli / parsect_interact.py (View on Github)
def ask_model_type(self):
        """
        Asks the user to choose a model from the different model types that are available in sciwing

        Returns
        -------
        str
            Model type chosen by the user
        """
        choices = self.return_model_type_choices()
        model_type_question = questionary.rawselect(
            "We have the following trained models. Chose one",
            qmark="❓",
            choices=choices,
        )
        return model_type_question.ask()

github tmbo / questionary / examples / readme.py (View on Github)
import questionary
from examples import custom_style_dope

if __name__ == "__main__":
    questionary.text("What's your first name").ask()
    questionary.password("What's your secret?").ask()
    questionary.confirm("Are you amazed?").ask()
    questionary.select(
        "What do you want to do?",
        choices=["Order a pizza", "Make a reservation", "Ask for opening hours"],
    ).ask()
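    # rawselect behaves like select, but the user picks an option by typing its number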
    questionary.rawselect(
        "What do you want to do?",
        choices=["Order a pizza", "Make a reservation", "Ask for opening hours"],
    ).ask()
    questionary.checkbox(
        "Select toppings", choices=["foo", "bar", "bazz"], style=custom_style_dope
    ).ask()
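
A note on the Choice objects used in the sciwing snippets above: when a Choice is given an explicit value, .ask() returns that value rather than the displayed title, which is why the interaction loop compares the answer against strings such as "enter_text". A minimal sketch (the prompt text is illustrative):

import questionary
from questionary import Choice

# Plain-string choices and Choices without a value return the title itself;
# a Choice with an explicit value returns that value instead.
action = questionary.rawselect(
    "What would you like to do now",
    choices=[
        Choice("See-Confusion-Matrix"),                  # returns "See-Confusion-Matrix"
        Choice(title="Enter text", value="enter_text"),  # returns "enter_text"
        Choice("exit"),
    ],
).ask()

if action == "enter_text":
    text = questionary.text("Enter text").ask()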