=== modified file 'doc/glmark2.1.in'
@@ -40,9 +40,13 @@
Show all scene option values used for benchmarks
(only explicitly set options are shown by default)
.TP
-\fB\-\-show-fps\fR
-Show live FPS count on screen (showing live FPS
-affects benchmarking results, use with care!)
+\fB\-\-run-forever\fR
+Run indefinitely, looping from the last benchmark
+back to the first
+.TP
+\fB\-\-annotate\fR
+Annotate the benchmarks with on-screen information
+(same as -b :show-fps=true:title=#info#)
.TP
\fB\-d\fR, \fB\-\-debug\fR
Display debug messages
=== modified file 'src/android.cpp'
@@ -31,21 +31,22 @@
#include "options.h"
#include "log.h"
#include "util.h"
-#include "default-benchmarks.h"
#include "main-loop.h"
+#include "benchmark-collection.h"
static Canvas *g_canvas;
static MainLoop *g_loop;
+static BenchmarkCollection *g_benchmark_collection;
class MainLoopAndroid : public MainLoop
{
public:
- MainLoopAndroid(Canvas &canvas) :
- MainLoop(canvas) {}
-
- virtual void after_scene_setup() {}
-
- virtual void before_scene_teardown()
+ MainLoopAndroid(Canvas &canvas, const std::vector<Benchmark *> &benchmarks) :
+ MainLoop(canvas, benchmarks) {}
+
+ virtual void log_scene_info() {}
+
+ virtual void log_scene_result()
{
Log::info("%s FPS: %u", scene_->info_string().c_str(),
scene_->average_fps());
@@ -55,12 +56,12 @@
class MainLoopDecorationAndroid : public MainLoopDecoration
{
public:
- MainLoopDecorationAndroid(Canvas &canvas) :
- MainLoopDecoration(canvas) {}
-
- virtual void after_scene_setup() {}
-
- virtual void before_scene_teardown()
+ MainLoopDecorationAndroid(Canvas &canvas, const std::vector<Benchmark *> &benchmarks) :
+ MainLoopDecoration(canvas, benchmarks) {}
+
+ virtual void log_scene_info() {}
+
+ virtual void log_scene_result()
{
Log::info("%s FPS: %u", scene_->info_string().c_str(),
scene_->average_fps());
@@ -186,12 +187,17 @@
Benchmark::register_scene(*new SceneDesktop(*g_canvas));
Benchmark::register_scene(*new SceneBuffer(*g_canvas));
- if (Options::show_fps)
- g_loop = new MainLoopDecorationAndroid(*g_canvas);
- else
- g_loop = new MainLoopAndroid(*g_canvas);
+ g_benchmark_collection = new BenchmarkCollection();
+ g_benchmark_collection->populate_from_options();
- g_loop->add_benchmarks();
+ if (g_benchmark_collection->needs_decoration()) {
+ g_loop = new MainLoopDecorationAndroid(*g_canvas,
+ g_benchmark_collection->benchmarks());
+ }
+ else {
+ g_loop = new MainLoopAndroid(*g_canvas,
+ g_benchmark_collection->benchmarks());
+ }
}
void
@@ -213,6 +219,7 @@
static_cast<void>(env);
delete g_loop;
+ delete g_benchmark_collection;
delete g_canvas;
}
=== added file 'src/benchmark-collection.cpp'
@@ -0,0 +1,119 @@
+/*
+ * Copyright © 2012 Linaro Limited
+ *
+ * This file is part of the glmark2 OpenGL (ES) 2.0 benchmark.
+ *
+ * glmark2 is free software: you can redistribute it and/or modify it under the
+ * terms of the GNU General Public License as published by the Free Software
+ * Foundation, either version 3 of the License, or (at your option) any later
+ * version.
+ *
+ * glmark2 is distributed in the hope that it will be useful, but WITHOUT ANY
+ * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+ * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+ * details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * glmark2. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * Authors:
+ * Alexandros Frantzis
+ */
+#include <fstream>
+#include "benchmark-collection.h"
+#include "default-benchmarks.h"
+#include "options.h"
+#include "log.h"
+#include "util.h"
+
+BenchmarkCollection::~BenchmarkCollection()
+{
+ Util::dispose_pointer_vector(benchmarks_);
+}
+
+void
+BenchmarkCollection::add(const std::vector<std::string> &benchmarks)
+{
+ for (std::vector<std::string>::const_iterator iter = benchmarks.begin();
+ iter != benchmarks.end();
+ iter++)
+ {
+ benchmarks_.push_back(new Benchmark(*iter));
+ }
+}
+
+void
+BenchmarkCollection::populate_from_options()
+{
+ if (Options::annotate) {
+ std::vector<std::string> annotate;
+ annotate.push_back(":show-fps=true:title=#info#");
+ add(annotate);
+ }
+
+ if (!Options::benchmarks.empty())
+ add(Options::benchmarks);
+
+ if (!Options::benchmark_files.empty())
+ add_benchmarks_from_files();
+
+ if (!benchmarks_contain_normal_scenes())
+ add(DefaultBenchmarks::get());
+}
+
+bool
+BenchmarkCollection::needs_decoration()
+{
+ for (std::vector<Benchmark *>::const_iterator bench_iter = benchmarks_.begin();
+ bench_iter != benchmarks_.end();
+ bench_iter++)
+ {
+ const Benchmark *bench = *bench_iter;
+ if (bench->needs_decoration())
+ return true;
+ }
+
+ return false;
+}
+
+
+void
+BenchmarkCollection::add_benchmarks_from_files()
+{
+ for (std::vector<std::string>::const_iterator iter = Options::benchmark_files.begin();
+ iter != Options::benchmark_files.end();
+ iter++)
+ {
+ std::ifstream ifs(iter->c_str());
+
+ if (!ifs.fail()) {
+ std::string line;
+
+ while (getline(ifs, line)) {
+ if (!line.empty())
+ benchmarks_.push_back(new Benchmark(line));
+ }
+ }
+ else {
+ Log::error("Cannot open benchmark file %s\n",
+ iter->c_str());
+ }
+
+ }
+}
+
+bool
+BenchmarkCollection::benchmarks_contain_normal_scenes()
+{
+ for (std::vector<Benchmark *>::const_iterator bench_iter = benchmarks_.begin();
+ bench_iter != benchmarks_.end();
+ bench_iter++)
+ {
+ const Benchmark *bench = *bench_iter;
+ if (!bench->scene().name().empty())
+ return true;
+ }
+
+ return false;
+}
+
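The population order above gives --annotate, -b and -f entries precedence over the defaults: the default benchmark list is appended only when none of the collected entries names a scene. A minimal sketch of the annotate-only case (the helper name is made up for the example; option values are set by hand instead of by Options::parse_args(), and scene registration is assumed to have happened as in main()):

    #include "benchmark-collection.h"
    #include "options.h"

    static bool annotate_only_example()
    {
        Options::annotate = true;          // as if --annotate had been passed
        Options::benchmarks.clear();       // no -b entries
        Options::benchmark_files.clear();  // no -f entries

        BenchmarkCollection collection;
        collection.populate_from_options();

        // The ":show-fps=true:title=#info#" entry names no scene, so the
        // default benchmark list is still appended after it; the entry's
        // show-fps/title options make the whole collection ask for decoration.
        return collection.needs_decoration();   // true
    }
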
=== added file 'src/benchmark-collection.h'
@@ -0,0 +1,59 @@
+/*
+ * Copyright © 2012 Linaro Limited
+ *
+ * This file is part of the glmark2 OpenGL (ES) 2.0 benchmark.
+ *
+ * glmark2 is free software: you can redistribute it and/or modify it under the
+ * terms of the GNU General Public License as published by the Free Software
+ * Foundation, either version 3 of the License, or (at your option) any later
+ * version.
+ *
+ * glmark2 is distributed in the hope that it will be useful, but WITHOUT ANY
+ * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+ * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+ * details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * glmark2. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * Authors:
+ * Alexandros Frantzis
+ */
+#ifndef GLMARK2_BENCHMARK_COLLECTION_H_
+#define GLMARK2_BENCHMARK_COLLECTION_H_
+
+#include <vector>
+#include <string>
+#include "benchmark.h"
+
+class BenchmarkCollection
+{
+public:
+ BenchmarkCollection() {}
+ ~BenchmarkCollection();
+
+ /*
+ * Adds benchmarks to the collection, one per benchmark description string.
+ */
+ void add(const std::vector<std::string> &benchmarks);
+
+ /*
+ * Populates the collection guided by the global options.
+ */
+ void populate_from_options();
+
+ /*
+ * Whether the benchmarks in this collection need decoration.
+ */
+ bool needs_decoration();
+
+ const std::vector<Benchmark *>& benchmarks() { return benchmarks_; }
+
+private:
+ void add_benchmarks_from_files();
+ bool benchmarks_contain_normal_scenes();
+
+ std::vector<Benchmark *> benchmarks_;
+};
+
+#endif /* GLMARK2_BENCHMARK_COLLECTION_H_ */
=== modified file 'src/benchmark.cpp'
@@ -120,6 +120,23 @@
scene_.unload();
}
+bool
+Benchmark::needs_decoration() const
+{
+ for (vector<OptionPair>::const_iterator iter = options_.begin();
+ iter != options_.end();
+ iter++)
+ {
+ if ((iter->first == "show-fps" && iter->second == "true") ||
+ (iter->first == "title" && !iter->second.empty()))
+ {
+ return true;
+ }
+ }
+
+ return false;
+}
+
void
Benchmark::load_options()
{
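needs_decoration() can be answered from the description string alone, before any scene is set up, because it only inspects the parsed option pairs. A small sketch, assuming scenes have already been registered via Benchmark::register_scene() as in main(); the helper name and the scene/option values are illustrative:

    #include <cassert>
    #include "benchmark.h"

    static void decoration_flag_examples()
    {
        Benchmark plain("build:use-vbo=true");   // ordinary options only
        Benchmark fps("build:show-fps=true");    // requests the FPS overlay
        Benchmark titled(":title=#info#");       // scene-less annotation entry

        assert(!plain.needs_decoration());
        assert(fps.needs_decoration());
        assert(titled.needs_decoration());
    }
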
=== modified file 'src/benchmark.h'
@@ -91,6 +91,11 @@
void teardown_scene();
/**
+ * Whether the benchmark requests on-screen decoration (an FPS counter or a title).
+ */
+ bool needs_decoration() const;
+
+ /**
* Registers a Scene, so that it becomes accessible by name.
*/
static void register_scene(Scene &scene);
=== modified file 'src/main-loop.cpp'
@@ -23,26 +23,20 @@
#include "main-loop.h"
#include "util.h"
#include "log.h"
-#include "default-benchmarks.h"
#include <string>
#include <sstream>
-#include <fstream>
/************
* MainLoop *
************/
-MainLoop::MainLoop(Canvas &canvas) :
- canvas_(canvas)
+MainLoop::MainLoop(Canvas &canvas, const std::vector<Benchmark *> &benchmarks) :
+ canvas_(canvas), benchmarks_(benchmarks)
{
reset();
}
-MainLoop::~MainLoop()
-{
- Util::dispose_pointer_vector(benchmarks_);
-}
void
MainLoop::reset()
@@ -53,28 +47,6 @@
bench_iter_ = benchmarks_.begin();
}
-void
-MainLoop::add_benchmarks()
-{
- if (!Options::benchmarks.empty())
- add_custom_benchmarks();
-
- if (!Options::benchmark_files.empty())
- add_custom_benchmarks_from_files();
-
- if (!benchmarks_contain_normal_scenes())
- add_default_benchmarks();
-
- bench_iter_ = benchmarks_.begin();
-}
-
-void
-MainLoop::add_benchmarks(const std::vector<Benchmark *> &benchmarks)
-{
- benchmarks_.insert(benchmarks_.end(), benchmarks.begin(), benchmarks.end());
- bench_iter_ = benchmarks_.begin();
-}
-
unsigned int
MainLoop::score()
{
@@ -102,7 +74,7 @@
else
break;
- bench_iter_++;
+ next_benchmark();
}
/* If we have found a valid scene, set it up */
@@ -112,6 +84,7 @@
before_scene_setup();
scene_ = &(*bench_iter_)->setup_scene();
after_scene_setup();
+ log_scene_info();
}
else {
/* ... otherwise we are done */
@@ -130,10 +103,10 @@
*/
if (!scene_->running() || should_quit) {
score_ += scene_->average_fps();
- before_scene_teardown();
+ log_scene_result();
(*bench_iter_)->teardown_scene();
scene_ = 0;
- bench_iter_++;
+ next_benchmark();
benchmarks_run_++;
}
@@ -152,90 +125,34 @@
}
void
-MainLoop::after_scene_setup()
+MainLoop::log_scene_info()
{
Log::info("%s", scene_->info_string().c_str());
Log::flush();
}
void
-MainLoop::before_scene_teardown()
+MainLoop::log_scene_result()
{
static const std::string format(Log::continuation_prefix + " FPS: %u\n");
Log::info(format.c_str(), scene_->average_fps());
}
void
-MainLoop::add_default_benchmarks()
-{
- const std::vector<std::string> &default_benchmarks = DefaultBenchmarks::get();
-
- for (std::vector<std::string>::const_iterator iter = default_benchmarks.begin();
- iter != default_benchmarks.end();
- iter++)
- {
- benchmarks_.push_back(new Benchmark(*iter));
- }
-}
-
-void
-MainLoop::add_custom_benchmarks()
-{
- for (std::vector<std::string>::const_iterator iter = Options::benchmarks.begin();
- iter != Options::benchmarks.end();
- iter++)
- {
- benchmarks_.push_back(new Benchmark(*iter));
- }
-}
-
-void
-MainLoop::add_custom_benchmarks_from_files()
-{
- for (std::vector<std::string>::const_iterator iter = Options::benchmark_files.begin();
- iter != Options::benchmark_files.end();
- iter++)
- {
- std::ifstream ifs(iter->c_str());
-
- if (!ifs.fail()) {
- std::string line;
-
- while (getline(ifs, line)) {
- if (!line.empty())
- benchmarks_.push_back(new Benchmark(line));
- }
- }
- else {
- Log::error("Cannot open benchmark file %s\n",
- iter->c_str());
- }
-
- }
-}
-
-bool
-MainLoop::benchmarks_contain_normal_scenes()
-{
- for (std::vector<Benchmark *>::const_iterator bench_iter = benchmarks_.begin();
- bench_iter != benchmarks_.end();
- bench_iter++)
- {
- const Benchmark *bench = *bench_iter;
- if (!bench->scene().name().empty())
- return true;
- }
-
- return false;
-}
-
+MainLoop::next_benchmark()
+{
+ bench_iter_++;
+ if (bench_iter_ == benchmarks_.end() && Options::run_forever)
+ bench_iter_ = benchmarks_.begin();
+}
/**********************
* MainLoopDecoration *
**********************/
-MainLoopDecoration::MainLoopDecoration(Canvas &canvas) :
- MainLoop(canvas), fps_renderer_(0), last_fps_(0)
+MainLoopDecoration::MainLoopDecoration(Canvas &canvas, const std::vector<Benchmark *> &benchmarks) :
+ MainLoop(canvas, benchmarks), show_fps_(false), show_title_(false),
+ fps_renderer_(0), title_renderer_(0), last_fps_(0)
{
}
@@ -244,36 +161,79 @@
{
delete fps_renderer_;
fps_renderer_ = 0;
+ delete title_renderer_;
+ title_renderer_ = 0;
}
void
MainLoopDecoration::draw()
{
static const unsigned int fps_interval = 500000;
- uint64_t now = Util::get_timestamp_us();
canvas_.clear();
scene_->draw();
scene_->update();
- if (now - fps_timestamp_ >= fps_interval) {
- last_fps_ = scene_->average_fps();
+ if (show_fps_) {
+ uint64_t now = Util::get_timestamp_us();
+ if (now - fps_timestamp_ >= fps_interval) {
+ last_fps_ = scene_->average_fps();
+ fps_renderer_update_text(last_fps_);
+ fps_timestamp_ = now;
+ }
+ fps_renderer_->render();
+ }
+
+ if (show_title_)
+ title_renderer_->render();
+
+ canvas_.update();
+}
+
+void
+MainLoopDecoration::before_scene_setup()
+{
+ delete fps_renderer_;
+ fps_renderer_ = 0;
+ delete title_renderer_;
+ title_renderer_ = 0;
+}
+
+void
+MainLoopDecoration::after_scene_setup()
+{
+ const Scene::Option &show_fps_option(scene_->options().find("show-fps")->second);
+ const Scene::Option &title_option(scene_->options().find("title")->second);
+ show_fps_ = show_fps_option.value == "true";
+ show_title_ = !title_option.value.empty();
+
+ if (show_fps_) {
+ const Scene::Option &fps_pos_option(scene_->options().find("fps-pos")->second);
+ const Scene::Option &fps_size_option(scene_->options().find("fps-size")->second);
+ fps_renderer_ = new TextRenderer(canvas_);
+ fps_renderer_->position(vec2_from_pos_string(fps_pos_option.value));
+ fps_renderer_->size(Util::fromString<float>(fps_size_option.value));
fps_renderer_update_text(last_fps_);
- fps_timestamp_ = now;
- }
- fps_renderer_->render();
-
- canvas_.update();
-}
-
-void
-MainLoopDecoration::before_scene_setup()
-{
- delete fps_renderer_;
- fps_renderer_ = new TextRenderer(canvas_);
- fps_renderer_update_text(last_fps_);
- fps_timestamp_ = Util::get_timestamp_us();
+ fps_timestamp_ = Util::get_timestamp_us();
+ }
+
+ if (show_title_) {
+ const Scene::Option &title_pos_option(scene_->options().find("title-pos")->second);
+ const Scene::Option &title_size_option(scene_->options().find("title-size")->second);
+ title_renderer_ = new TextRenderer(canvas_);
+ title_renderer_->position(vec2_from_pos_string(title_pos_option.value));
+ title_renderer_->size(Util::fromString<float>(title_size_option.value));
+
+ if (title_option.value == "#info#")
+ title_renderer_->text(scene_->info_string());
+ else if (title_option.value == "#name#")
+ title_renderer_->text(scene_->name());
+ else if (title_option.value == "#r2d2#")
+ title_renderer_->text("Help me, Obi-Wan Kenobi. You're my only hope.");
+ else
+ title_renderer_->text(title_option.value);
+ }
}
void
@@ -284,12 +244,28 @@
fps_renderer_->text(ss.str());
}
+LibMatrix::vec2
+MainLoopDecoration::vec2_from_pos_string(const std::string &s)
+{
+ LibMatrix::vec2 v(0.0, 0.0);
+ std::vector<std::string> elems;
+ Util::split(s, ',', elems);
+
+ if (elems.size() > 0)
+ v.x(Util::fromString<float>(elems[0]));
+
+ if (elems.size() > 1)
+ v.y(Util::fromString<float>(elems[1]));
+
+ return v;
+}
+
/**********************
* MainLoopValidation *
**********************/
-MainLoopValidation::MainLoopValidation(Canvas &canvas) :
- MainLoop(canvas)
+MainLoopValidation::MainLoopValidation(Canvas &canvas, const std::vector<Benchmark *> &benchmarks) :
+ MainLoop(canvas, benchmarks)
{
}
@@ -307,7 +283,7 @@
}
void
-MainLoopValidation::before_scene_teardown()
+MainLoopValidation::log_scene_result()
{
static const std::string format(Log::continuation_prefix + " Validation: %s\n");
std::string result;
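Because MainLoopDecoration now tears its text renderers down in before_scene_setup() and rebuilds them in after_scene_setup() from the current scene's own options, each benchmark in a run can be decorated differently. A hedged sketch of driving the decorated loop directly (the helper name, scene names and option values are illustrative; the canvas is assumed to be initialized by the caller as in main()):

    #include "benchmark-collection.h"
    #include "main-loop.h"

    static void run_decorated(Canvas &canvas)
    {
        std::vector<std::string> descs;
        descs.push_back("build:show-fps=true:fps-pos=-1.0,1.0"); // FPS counter at a custom position
        descs.push_back("texture:title=#name#");                 // title only, via the #name# placeholder

        BenchmarkCollection collection;
        collection.add(descs);

        // The collection must outlive the loop, which now only keeps a
        // reference to the benchmark vector.
        MainLoopDecoration loop(canvas, collection.benchmarks());
        while (loop.step());
    }
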
=== modified file 'src/main-loop.h'
@@ -25,6 +25,7 @@
#include "canvas.h"
#include "benchmark.h"
#include "text-renderer.h"
+#include "vec.h"
#include <vector>
/**
@@ -33,9 +34,9 @@
class MainLoop
{
public:
- MainLoop(Canvas &canvas);
+ MainLoop(Canvas &canvas, const std::vector<Benchmark *> &benchmarks);
- virtual ~MainLoop();
+ virtual ~MainLoop() {}
/**
* Resets the main loop.
@@ -45,19 +46,6 @@
*/
void reset();
- /**
- * Adds benchmarks.
- *
- * This method takes into account benchmark related command line options
- * to decide which benchmarks to add.
- */
- void add_benchmarks();
-
- /**
- * Adds user defined benchmarks.
- */
- void add_benchmarks(const std::vector<Benchmark *> &benchmarks);
-
/**
* Gets the current total benchmarking score.
*/
@@ -83,27 +71,27 @@
/**
* Overridable method for post scene-setup customizations.
*/
- virtual void after_scene_setup();
-
- /**
- * Overridable method for pre scene-teardown customizations.
- */
- virtual void before_scene_teardown();
+ virtual void after_scene_setup() {}
+
+ /**
+ * Overridable method for logging scene info.
+ */
+ virtual void log_scene_info();
+
+ /**
+ * Overridable method for logging scene result.
+ */
+ virtual void log_scene_result();
protected:
+ void next_benchmark();
Canvas &canvas_;
Scene *scene_;
- std::vector<Benchmark *> benchmarks_;
+ const std::vector<Benchmark *> &benchmarks_;
unsigned int score_;
unsigned int benchmarks_run_;
std::vector<Benchmark *>::const_iterator bench_iter_;
-
-private:
- void add_default_benchmarks();
- void add_custom_benchmarks();
- void add_custom_benchmarks_from_files();
- bool benchmarks_contain_normal_scenes();
};
/**
@@ -112,15 +100,21 @@
class MainLoopDecoration : public MainLoop
{
public:
- MainLoopDecoration(Canvas &canvas);
+ MainLoopDecoration(Canvas &canvas, const std::vector<Benchmark *> &benchmarks);
virtual ~MainLoopDecoration();
virtual void draw();
virtual void before_scene_setup();
+ virtual void after_scene_setup();
protected:
void fps_renderer_update_text(unsigned int fps);
+ LibMatrix::vec2 vec2_from_pos_string(const std::string &s);
+
+ bool show_fps_;
+ bool show_title_;
TextRenderer *fps_renderer_;
+ TextRenderer *title_renderer_;
unsigned int last_fps_;
uint64_t fps_timestamp_;
};
@@ -131,10 +125,10 @@
class MainLoopValidation : public MainLoop
{
public:
- MainLoopValidation(Canvas &canvas);
+ MainLoopValidation(Canvas &canvas, const std::vector<Benchmark *> &benchmarks);
virtual void draw();
- virtual void before_scene_teardown();
+ virtual void log_scene_result();
};
#endif /* GLMARK2_MAIN_LOOP_H_ */
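The log_scene_info()/log_scene_result() hooks declared above are the points the platform loops customize (see MainLoopAndroid earlier in this diff). For illustration only, a hypothetical subclass that silences the default per-scene logging:

    #include "main-loop.h"

    class MainLoopQuiet : public MainLoop
    {
    public:
        MainLoopQuiet(Canvas &canvas, const std::vector<Benchmark *> &benchmarks) :
            MainLoop(canvas, benchmarks) {}

        virtual void log_scene_info() {}    // suppress the per-scene info line
        virtual void log_scene_result() {}  // suppress the per-scene FPS line
    };
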
=== modified file 'src/main.cpp'
@@ -28,9 +28,9 @@
#include "options.h"
#include "log.h"
#include "util.h"
-#include "default-benchmarks.h"
#include "text-renderer.h"
#include "main-loop.h"
+#include "benchmark-collection.h"
#include <iostream>
#include <fstream>
@@ -111,24 +111,33 @@
void
do_benchmark(Canvas &canvas)
{
- MainLoop loop_normal(canvas);
- MainLoopDecoration loop_decoration(canvas);
+ BenchmarkCollection benchmark_collection;
+ MainLoop *loop;
- MainLoop &loop(Options::show_fps ? loop_decoration : loop_normal);
- loop.add_benchmarks();
+ benchmark_collection.populate_from_options();
- while (loop.step());
-
- Log::info("=======================================================\n");
- Log::info(" glmark2 Score: %u \n", loop.score());
- Log::info("=======================================================\n");
+ if (benchmark_collection.needs_decoration())
+ loop = new MainLoopDecoration(canvas, benchmark_collection.benchmarks());
+ else
+ loop = new MainLoop(canvas, benchmark_collection.benchmarks());
+
+ while (loop->step());
+
+ Log::info("=======================================================\n");
+ Log::info(" glmark2 Score: %u \n", loop->score());
+ Log::info("=======================================================\n");
+
+ delete loop;
}
void
do_validation(Canvas &canvas)
{
- MainLoopValidation loop(canvas);
- loop.add_benchmarks();
+ BenchmarkCollection benchmark_collection;
+
+ benchmark_collection.populate_from_options();
+
+ MainLoopValidation loop(canvas, benchmark_collection.benchmarks());
while (loop.step());
}
=== modified file 'src/options.cpp'
@@ -38,20 +38,22 @@
bool Options::list_scenes = false;
bool Options::show_all_options = false;
bool Options::show_debug = false;
-bool Options::show_fps = false;
bool Options::show_help = false;
bool Options::reuse_context = false;
+bool Options::run_forever = false;
+bool Options::annotate = false;
static struct option long_options[] = {
+ {"annotate", 0, 0, 0},
{"benchmark", 1, 0, 0},
{"benchmark-file", 1, 0, 0},
{"validate", 0, 0, 0},
{"no-swap-buffers", 0, 0, 0},
{"reuse-context", 0, 0, 0},
+ {"run-forever", 0, 0, 0},
{"size", 1, 0, 0},
{"list-scenes", 0, 0, 0},
{"show-all-options", 0, 0, 0},
- {"show-fps", 0, 0, 0},
{"debug", 0, 0, 0},
{"help", 0, 0, 0},
{0, 0, 0, 0}
@@ -103,8 +105,10 @@
" and their options\n"
" --show-all-options Show all scene option values used for benchmarks\n"
" (only explicitly set options are shown by default)\n"
- " --show-fps Show live FPS count on screen (showing live FPS\n"
- " affects benchmarking results, use with care!)\n"
+ " --run-forever Run indefinitely, looping from the last benchmark\n"
+ " back to the first\n"
+ " --annotate Annotate the benchmarks with on-screen information\n"
+ " (same as -b :show-fps=true:title=#info#)\n"
" -d, --debug Display debug messages\n"
" -h, --help Display help\n");
}
@@ -127,6 +131,8 @@
if (option_index != -1)
optname = long_options[option_index].name;
+ if (!strcmp(optname, "annotate"))
+ Options::annotate = true;
if (c == 'b' || !strcmp(optname, "benchmark"))
Options::benchmarks.push_back(optarg);
else if (c == 'f' || !strcmp(optname, "benchmark-file"))
@@ -143,8 +149,8 @@
Options::list_scenes = true;
else if (!strcmp(optname, "show-all-options"))
Options::show_all_options = true;
- else if (!strcmp(optname, "show-fps"))
- Options::show_fps = true;
+ else if (!strcmp(optname, "run-forever"))
+ Options::run_forever = true;
else if (c == 'd' || !strcmp(optname, "debug"))
Options::show_debug = true;
else if (c == 'h' || !strcmp(optname, "help"))
=== modified file 'src/options.h'
@@ -39,9 +39,10 @@
static bool list_scenes;
static bool show_all_options;
static bool show_debug;
- static bool show_fps;
static bool show_help;
static bool reuse_context;
+ static bool run_forever;
+ static bool annotate;
};
#endif /* OPTIONS_H_ */
=== modified file 'src/scene.cpp'
@@ -46,6 +46,20 @@
options_["fragment-precision"] = Scene::Option("fragment-precision",
"default,default,default,default",
"The precision values for the fragment shader (\"int,float,sampler2d,samplercube\")");
+ /* FPS options */
+ options_["show-fps"] = Scene::Option("show-fps", "false",
+ "Show live FPS counter");
+ options_["fps-pos"] = Scene::Option("fps-pos", "-1.0,-1.0",
+ "The position on screen where to show FPS");
+ options_["fps-size"] = Scene::Option("fps-size", "0.03",
+ "The width of each glyph for the FPS");
+ /* Title options */
+ options_["title"] = Scene::Option("title", "",
+ "The scene title to show");
+ options_["title-pos"] = Scene::Option("title-pos", "-0.7,-1.0",
+ "The position on screen where to show the title");
+ options_["title-size"] = Scene::Option("title-size", "0.03",
+ "The width of each glyph in the title");
}
Scene::~Scene()