Main repository of MikuMikuStudio
| Revision | 77f4e1e5396dc0be4824a283edcb90e960e6b580 (tree) |
| --- | --- |
| Time | 2013-05-16 06:44:21 |
| Author | iwgeric@gmail.com <iwgeric@gmai...> |
| Committer | iwgeric@gmail.com |
Android: Refactor AndroidAudioRenderer into an interface with 2 implementations (current MediaPlayer/SoundPool and new OpenAL Soft). Added AppSetting that allows AndroidHarness to switch the audio renderer (default is still MediaPlayer/SoundPool).
git-svn-id: http://jmonkeyengine.googlecode.com/svn/trunk@10615 75d07b2b-3a1a-0410-a2c5-0572b91ccdca
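
Usage note (not part of the commit): a minimal sketch, assuming a game activity that subclasses `AndroidHarness`, of how an app would opt into the new OpenAL Soft renderer after this change. `MainActivity` and the `appClass` value are placeholders; `audioRendererType`, `AppSettings.ANDROID_MEDIAPLAYER`, and `AppSettings.ANDROID_OPENAL_SOFT` are the names introduced by this commit.

```java
import com.jme3.app.AndroidHarness;
import com.jme3.system.AppSettings;

// Hypothetical activity; the fields set below are protected members of AndroidHarness.
public class MainActivity extends AndroidHarness {

    public MainActivity() {
        // Fully qualified name of the jME3 application to start (placeholder).
        appClass = "com.example.MyGame";

        // Select the new OpenAL Soft renderer (OpenSL backend, Android 2.3+).
        // Leaving this field untouched keeps the default
        // MediaPlayer / SoundPool renderer (Android 2.2+).
        audioRendererType = AppSettings.ANDROID_OPENAL_SOFT;
    }
}
```

AndroidHarness forwards the chosen value through `settings.setAudioRenderer(audioRendererType)` (see the hunk below), and `JmeAndroidSystem.newAudioRenderer` then instantiates the matching implementation.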
@@ -15,7 +15,6 @@ import android.widget.ImageView; | ||
15 | 15 | import android.widget.TextView; |
16 | 16 | import com.jme3.audio.AudioRenderer; |
17 | 17 | import com.jme3.audio.android.AndroidAudioRenderer; |
18 | -import com.jme3.audio.android.AndroidOpenALSoftAudioRenderer; | |
19 | 18 | import com.jme3.input.JoyInput; |
20 | 19 | import com.jme3.input.TouchInput; |
21 | 20 | import com.jme3.input.android.AndroidSensorJoyInput; |
@@ -72,6 +71,20 @@ public class AndroidHarness extends Activity implements TouchListener, DialogInt | ||
72 | 71 | * set to 2, 4 to enable multisampling. |
73 | 72 | */ |
74 | 73 | protected int antiAliasingSamples = 0; |
74 | + | |
75 | + /** | |
76 | + * Sets the type of Audio Renderer to be used. | |
77 | + * <p> | |
78 | + * Android MediaPlayer / SoundPool is the default and can be used on all | |
79 | + * supported Android platform versions (2.2+)<br> | |
80 | + * OpenAL Soft uses an OpenSL backend and is only supported on Android | |
81 | + * versions 2.3+. | |
82 | + * <p> | |
83 | + * Only use ANDROID_ static strings found in AppSettings | |
84 | + * | |
85 | + */ | |
86 | + protected String audioRendererType = AppSettings.ANDROID_MEDIAPLAYER; | |
87 | + | |
75 | 88 | /** |
76 | 89 | * If true Android Sensors are used as simulated Joysticks Users can use the |
77 | 90 | * Android sensor feedback through the RawInputListener or by registering |
@@ -110,7 +123,7 @@ public class AndroidHarness extends Activity implements TouchListener, DialogInt | ||
110 | 123 | /** |
111 | 124 | * Set the screen window mode. If screenFullSize is true, then the |
112 | 125 | * notification bar and title bar are removed and the screen covers the |
113 | - * entire display. If screenFullSize is false, then the notification bar | |
126 | + * entire display. If screenFullSize is false, then the notification bar | |
114 | 127 | * remains visible if screenShowTitle is true while screenFullScreen is |
115 | 128 | * false, then the title bar is also displayed under the notification bar. |
116 | 129 | */ |
@@ -200,6 +213,7 @@ public class AndroidHarness extends Activity implements TouchListener, DialogInt | ||
200 | 213 | settings.setSamples(antiAliasingSamples); |
201 | 214 | settings.setResolution(disp.getWidth(), disp.getHeight()); |
202 | 215 | settings.put(AndroidConfigChooser.SETTINGS_CONFIG_TYPE, eglConfigType); |
216 | + settings.setAudioRenderer(audioRendererType); | |
203 | 217 | |
204 | 218 | |
205 | 219 | // Create application instance |
@@ -487,10 +501,6 @@ public class AndroidHarness extends Activity implements TouchListener, DialogInt | ||
487 | 501 | AndroidAudioRenderer renderer = (AndroidAudioRenderer) result; |
488 | 502 | renderer.resumeAll(); |
489 | 503 | } |
490 | - if (result instanceof AndroidOpenALSoftAudioRenderer) { | |
491 | - AndroidOpenALSoftAudioRenderer renderer = (AndroidOpenALSoftAudioRenderer) result; | |
492 | - renderer.resumeAll(); | |
493 | - } | |
494 | 504 | } |
495 | 505 | //resume the sensors (aka joysticks) |
496 | 506 | if (app.getContext() != null) { |
@@ -530,10 +540,6 @@ public class AndroidHarness extends Activity implements TouchListener, DialogInt | ||
530 | 540 | AndroidAudioRenderer renderer = (AndroidAudioRenderer) result; |
531 | 541 | renderer.pauseAll(); |
532 | 542 | } |
533 | - if (result instanceof AndroidOpenALSoftAudioRenderer) { | |
534 | - AndroidOpenALSoftAudioRenderer renderer = (AndroidOpenALSoftAudioRenderer) result; | |
535 | - renderer.pauseAll(); | |
536 | - } | |
537 | 543 | } |
538 | 544 | //pause the sensors (aka joysticks) |
539 | 545 | if (app.getContext() != null) { |
@@ -33,8 +33,11 @@ package com.jme3.asset; | ||
33 | 33 | |
34 | 34 | import com.jme3.asset.plugins.AndroidLocator; |
35 | 35 | import com.jme3.asset.plugins.ClasspathLocator; |
36 | +import com.jme3.audio.android.AndroidAudioRenderer; | |
36 | 37 | import com.jme3.audio.plugins.AndroidAudioLoader; |
37 | -import com.jme3.texture.Texture; | |
38 | +import com.jme3.audio.plugins.WAVLoader; | |
39 | +import com.jme3.system.AppSettings; | |
40 | +import com.jme3.system.android.JmeAndroidSystem; | |
38 | 41 | import com.jme3.texture.plugins.AndroidImageLoader; |
39 | 42 | import java.net.URL; |
40 | 43 | import java.util.logging.Level; |
@@ -58,7 +61,7 @@ public class AndroidAssetManager extends DesktopAssetManager { | ||
58 | 61 | //this(Thread.currentThread().getContextClassLoader().getResource("com/jme3/asset/Android.cfg")); |
59 | 62 | this(null); |
60 | 63 | } |
61 | - | |
64 | + | |
62 | 65 | private void registerLoaderSafe(String loaderClass, String ... extensions) { |
63 | 66 | try { |
64 | 67 | Class<? extends AssetLoader> loader = (Class<? extends AssetLoader>) Class.forName(loaderClass); |
@@ -73,22 +76,32 @@ public class AndroidAssetManager extends DesktopAssetManager { | ||
73 | 76 | * If URL == null then a default list of locators and loaders for android is set |
74 | 77 | * @param configFile |
75 | 78 | */ |
76 | - public AndroidAssetManager(URL configFile) { | |
79 | + public AndroidAssetManager(URL configFile) { | |
77 | 80 | System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver"); |
78 | 81 | |
79 | - // Set Default Android config | |
82 | + // Set Default Android config | |
80 | 83 | registerLocator("", AndroidLocator.class); |
81 | 84 | registerLocator("", ClasspathLocator.class); |
82 | - | |
85 | + | |
83 | 86 | registerLoader(AndroidImageLoader.class, "jpg", "bmp", "gif", "png", "jpeg"); |
84 | - registerLoader(AndroidAudioLoader.class, "ogg", "mp3", "wav"); | |
87 | + if (JmeAndroidSystem.getAudioRendererType().equals(AppSettings.ANDROID_MEDIAPLAYER)) { | |
88 | + registerLoader(AndroidAudioLoader.class, "ogg", "mp3", "wav"); | |
89 | + } else if (JmeAndroidSystem.getAudioRendererType().equals(AppSettings.ANDROID_OPENAL_SOFT)) { | |
90 | + registerLoader(WAVLoader.class, "wav"); | |
91 | + // TODO jogg is not in core, need to add some other way to get around compile errors, or not. | |
92 | +// registerLoader(com.jme3.audio.plugins.OGGLoader.class, "ogg"); | |
93 | + registerLoaderSafe("com.jme3.audio.plugins.OGGLoader", "ogg"); | |
94 | + } else { | |
95 | + throw new IllegalStateException("No Audio Renderer Type defined!"); | |
96 | + } | |
97 | + | |
85 | 98 | registerLoader(com.jme3.material.plugins.J3MLoader.class, "j3m"); |
86 | 99 | registerLoader(com.jme3.material.plugins.J3MLoader.class, "j3md"); |
87 | 100 | registerLoader(com.jme3.material.plugins.ShaderNodeDefinitionLoader.class, "j3sn"); |
88 | 101 | registerLoader(com.jme3.shader.plugins.GLSLLoader.class, "vert", "frag", "glsl", "glsllib"); |
89 | 102 | registerLoader(com.jme3.export.binary.BinaryImporter.class, "j3o"); |
90 | 103 | registerLoader(com.jme3.font.plugins.BitmapFontLoader.class, "fnt"); |
91 | - | |
104 | + | |
92 | 105 | // Less common loaders (especially on Android) |
93 | 106 | registerLoaderSafe("com.jme3.texture.plugins.DDSLoader", "dds"); |
94 | 107 | registerLoaderSafe("com.jme3.texture.plugins.PFMLoader", "pfm"); |
@@ -100,9 +113,9 @@ public class AndroidAssetManager extends DesktopAssetManager { | ||
100 | 113 | registerLoaderSafe("com.jme3.scene.plugins.ogre.SkeletonLoader", "skeleton.xml"); |
101 | 114 | registerLoaderSafe("com.jme3.scene.plugins.ogre.MaterialLoader", "material"); |
102 | 115 | registerLoaderSafe("com.jme3.scene.plugins.ogre.SceneLoader", "scene"); |
103 | - | |
116 | + | |
104 | 117 | |
105 | 118 | logger.fine("AndroidAssetManager created."); |
106 | 119 | } |
107 | - | |
120 | + | |
108 | 121 | } |
@@ -1,523 +1,24 @@ | ||
1 | -/* | |
2 | - * Copyright (c) 2009-2012 jMonkeyEngine | |
3 | - * All rights reserved. | |
4 | - * | |
5 | - * Redistribution and use in source and binary forms, with or without | |
6 | - * modification, are permitted provided that the following conditions are | |
7 | - * met: | |
8 | - * | |
9 | - * * Redistributions of source code must retain the above copyright | |
10 | - * notice, this list of conditions and the following disclaimer. | |
11 | - * | |
12 | - * * Redistributions in binary form must reproduce the above copyright | |
13 | - * notice, this list of conditions and the following disclaimer in the | |
14 | - * documentation and/or other materials provided with the distribution. | |
15 | - * | |
16 | - * * Neither the name of 'jMonkeyEngine' nor the names of its contributors | |
17 | - * may be used to endorse or promote products derived from this software | |
18 | - * without specific prior written permission. | |
19 | - * | |
20 | - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | |
21 | - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED | |
22 | - * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | |
23 | - * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR | |
24 | - * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, | |
25 | - * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | |
26 | - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR | |
27 | - * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF | |
28 | - * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING | |
29 | - * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS | |
30 | - * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
31 | - */ | |
32 | 1 | package com.jme3.audio.android; |
33 | 2 | |
34 | -import android.app.Activity; | |
35 | -import android.content.Context; | |
36 | -import android.content.res.AssetFileDescriptor; | |
37 | -import android.content.res.AssetManager; | |
38 | -import android.media.AudioManager; | |
39 | -import android.media.MediaPlayer; | |
40 | -import android.media.SoundPool; | |
41 | -import com.jme3.asset.AssetKey; | |
42 | -import com.jme3.audio.*; | |
43 | -import com.jme3.audio.AudioSource.Status; | |
44 | -import com.jme3.math.FastMath; | |
45 | -import com.jme3.math.Vector3f; | |
46 | -import java.io.IOException; | |
47 | -import java.util.HashMap; | |
48 | -import java.util.logging.Level; | |
49 | -import java.util.logging.Logger; | |
3 | +import com.jme3.audio.AudioRenderer; | |
50 | 4 | |
51 | 5 | /** |
52 | - * This class is the android implementation for {@link AudioRenderer} | |
6 | + * Android specific AudioRenderer interface that supports pausing and resuming | |
7 | + * audio files when the app is minimized or placed in the background | |
53 | 8 | * |
54 | - * @author larynx | |
55 | - * @author plan_rich | |
9 | + * @author iwgeric | |
56 | 10 | */ |
57 | -public class AndroidAudioRenderer implements AudioRenderer, | |
58 | - SoundPool.OnLoadCompleteListener, MediaPlayer.OnCompletionListener { | |
59 | - | |
60 | - private static final Logger logger = Logger.getLogger(AndroidAudioRenderer.class.getName()); | |
61 | - private final static int MAX_NUM_CHANNELS = 16; | |
62 | - private final HashMap<AudioSource, MediaPlayer> musicPlaying = new HashMap<AudioSource, MediaPlayer>(); | |
63 | - private SoundPool soundPool = null; | |
64 | - private final Vector3f listenerPosition = new Vector3f(); | |
65 | - // For temp use | |
66 | - private final Vector3f distanceVector = new Vector3f(); | |
67 | - private final AssetManager assetManager; | |
68 | - private HashMap<Integer, AudioSource> soundpoolStillLoading = new HashMap<Integer, AudioSource>(); | |
69 | - private Listener listener; | |
70 | - private boolean audioDisabled = false; | |
71 | - private final AudioManager manager; | |
72 | - | |
73 | - public AndroidAudioRenderer(Activity context) { | |
74 | - manager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); | |
75 | - context.setVolumeControlStream(AudioManager.STREAM_MUSIC); | |
76 | - assetManager = context.getAssets(); | |
77 | - } | |
78 | - | |
79 | - @Override | |
80 | - public void initialize() { | |
81 | - soundPool = new SoundPool(MAX_NUM_CHANNELS, AudioManager.STREAM_MUSIC, | |
82 | - 0); | |
83 | - soundPool.setOnLoadCompleteListener(this); | |
84 | - } | |
85 | - | |
86 | - @Override | |
87 | - public void updateSourceParam(AudioSource src, AudioParam param) { | |
88 | - if (audioDisabled) { | |
89 | - return; | |
90 | - } | |
91 | - | |
92 | - if (src.getChannel() < 0) { | |
93 | - return; | |
94 | - } | |
95 | - | |
96 | - switch (param) { | |
97 | - case Position: | |
98 | - if (!src.isPositional()) { | |
99 | - return; | |
100 | - } | |
101 | - | |
102 | - Vector3f pos = src.getPosition(); | |
103 | - break; | |
104 | - case Velocity: | |
105 | - if (!src.isPositional()) { | |
106 | - return; | |
107 | - } | |
108 | - | |
109 | - Vector3f vel = src.getVelocity(); | |
110 | - break; | |
111 | - case MaxDistance: | |
112 | - if (!src.isPositional()) { | |
113 | - return; | |
114 | - } | |
115 | - break; | |
116 | - case RefDistance: | |
117 | - if (!src.isPositional()) { | |
118 | - return; | |
119 | - } | |
120 | - break; | |
121 | - case ReverbFilter: | |
122 | - if (!src.isPositional() || !src.isReverbEnabled()) { | |
123 | - return; | |
124 | - } | |
125 | - break; | |
126 | - case ReverbEnabled: | |
127 | - if (!src.isPositional()) { | |
128 | - return; | |
129 | - } | |
130 | - | |
131 | - if (src.isReverbEnabled()) { | |
132 | - updateSourceParam(src, AudioParam.ReverbFilter); | |
133 | - } | |
134 | - break; | |
135 | - case IsPositional: | |
136 | - break; | |
137 | - case Direction: | |
138 | - if (!src.isDirectional()) { | |
139 | - return; | |
140 | - } | |
141 | - | |
142 | - Vector3f dir = src.getDirection(); | |
143 | - break; | |
144 | - case InnerAngle: | |
145 | - if (!src.isDirectional()) { | |
146 | - return; | |
147 | - } | |
148 | - break; | |
149 | - case OuterAngle: | |
150 | - if (!src.isDirectional()) { | |
151 | - return; | |
152 | - } | |
153 | - break; | |
154 | - case IsDirectional: | |
155 | - if (src.isDirectional()) { | |
156 | - updateSourceParam(src, AudioParam.Direction); | |
157 | - updateSourceParam(src, AudioParam.InnerAngle); | |
158 | - updateSourceParam(src, AudioParam.OuterAngle); | |
159 | - } else { | |
160 | - } | |
161 | - break; | |
162 | - case DryFilter: | |
163 | - if (src.getDryFilter() != null) { | |
164 | - Filter f = src.getDryFilter(); | |
165 | - if (f.isUpdateNeeded()) { | |
166 | - // updateFilter(f); | |
167 | - } | |
168 | - } | |
169 | - break; | |
170 | - case Looping: | |
171 | - if (src.isLooping()) { | |
172 | - } | |
173 | - break; | |
174 | - case Volume: | |
175 | - MediaPlayer mp = musicPlaying.get(src); | |
176 | - if (mp != null) { | |
177 | - mp.setVolume(src.getVolume(), src.getVolume()); | |
178 | - } else { | |
179 | - soundPool.setVolume(src.getChannel(), src.getVolume(), | |
180 | - src.getVolume()); | |
181 | - } | |
182 | - | |
183 | - break; | |
184 | - case Pitch: | |
185 | - | |
186 | - break; | |
187 | - } | |
188 | - | |
189 | - } | |
190 | - | |
191 | - @Override | |
192 | - public void updateListenerParam(Listener listener, ListenerParam param) { | |
193 | - if (audioDisabled) { | |
194 | - return; | |
195 | - } | |
196 | - | |
197 | - switch (param) { | |
198 | - case Position: | |
199 | - listenerPosition.set(listener.getLocation()); | |
200 | - break; | |
201 | - case Rotation: | |
202 | - Vector3f dir = listener.getDirection(); | |
203 | - Vector3f up = listener.getUp(); | |
204 | - | |
205 | - break; | |
206 | - case Velocity: | |
207 | - Vector3f vel = listener.getVelocity(); | |
208 | - | |
209 | - break; | |
210 | - case Volume: | |
211 | - // alListenerf(AL_GAIN, listener.getVolume()); | |
212 | - break; | |
213 | - } | |
214 | - | |
215 | - } | |
216 | - | |
217 | - @Override | |
218 | - public void update(float tpf) { | |
219 | - float distance; | |
220 | - float volume; | |
221 | - | |
222 | - // Loop over all mediaplayers | |
223 | - for (AudioSource src : musicPlaying.keySet()) { | |
224 | - | |
225 | - MediaPlayer mp = musicPlaying.get(src); | |
226 | - | |
227 | - // Calc the distance to the listener | |
228 | - distanceVector.set(listenerPosition); | |
229 | - distanceVector.subtractLocal(src.getPosition()); | |
230 | - distance = FastMath.abs(distanceVector.length()); | |
231 | - | |
232 | - if (distance < src.getRefDistance()) { | |
233 | - distance = src.getRefDistance(); | |
234 | - } | |
235 | - if (distance > src.getMaxDistance()) { | |
236 | - distance = src.getMaxDistance(); | |
237 | - } | |
238 | - volume = src.getRefDistance() / distance; | |
239 | - | |
240 | - AndroidAudioData audioData = (AndroidAudioData) src.getAudioData(); | |
241 | - | |
242 | - if (FastMath.abs(audioData.getCurrentVolume() - volume) > FastMath.FLT_EPSILON) { | |
243 | - // Left / Right channel get the same volume by now, only | |
244 | - // positional | |
245 | - mp.setVolume(volume, volume); | |
246 | - | |
247 | - audioData.setCurrentVolume(volume); | |
248 | - } | |
249 | - | |
250 | - } | |
251 | - } | |
252 | - | |
253 | - public void setListener(Listener listener) { | |
254 | - if (audioDisabled) { | |
255 | - return; | |
256 | - } | |
257 | - | |
258 | - if (this.listener != null) { | |
259 | - // previous listener no longer associated with current | |
260 | - // renderer | |
261 | - this.listener.setRenderer(null); | |
262 | - } | |
263 | - | |
264 | - this.listener = listener; | |
265 | - this.listener.setRenderer(this); | |
266 | - | |
267 | - } | |
268 | - | |
269 | - @Override | |
270 | - public void cleanup() { | |
271 | - // Cleanup sound pool | |
272 | - if (soundPool != null) { | |
273 | - soundPool.release(); | |
274 | - soundPool = null; | |
275 | - } | |
276 | - | |
277 | - // Cleanup media player | |
278 | - for (AudioSource src : musicPlaying.keySet()) { | |
279 | - MediaPlayer mp = musicPlaying.get(src); | |
280 | - { | |
281 | - mp.stop(); | |
282 | - mp.release(); | |
283 | - src.setStatus(Status.Stopped); | |
284 | - } | |
285 | - } | |
286 | - musicPlaying.clear(); | |
287 | - } | |
288 | - | |
289 | - @Override | |
290 | - public void onCompletion(MediaPlayer mp) { | |
291 | - if (mp.isPlaying()) { | |
292 | - mp.seekTo(0); | |
293 | - mp.stop(); | |
294 | - } | |
295 | - // XXX: This has bad performance -> maybe change overall structure of | |
296 | - // mediaplayer in this audiorenderer? | |
297 | - for (AudioSource src : musicPlaying.keySet()) { | |
298 | - if (musicPlaying.get(src) == mp) { | |
299 | - src.setStatus(Status.Stopped); | |
300 | - break; | |
301 | - } | |
302 | - } | |
303 | - | |
304 | - } | |
11 | +public interface AndroidAudioRenderer extends AudioRenderer { | |
305 | 12 | |
306 | 13 | /** |
307 | - * Plays using the {@link SoundPool} of Android. Due to hard limitation of | |
308 | - * the SoundPool: After playing more instances of the sound you only have | |
309 | - * the channel of the last played instance. | |
310 | - * | |
311 | - * It is not possible to get information about the state of the soundpool of | |
312 | - * a specific streamid, so removing is not possilbe -> noone knows when | |
313 | - * sound finished. | |
14 | + * Pauses all Playing audio. To be used when the app is placed in the | |
15 | + * background. | |
314 | 16 | */ |
315 | - public void playSourceInstance(AudioSource src) { | |
316 | - if (audioDisabled) { | |
317 | - return; | |
318 | - } | |
319 | - | |
320 | - AndroidAudioData audioData = (AndroidAudioData) src.getAudioData(); | |
321 | - | |
322 | - if (!(audioData.getAssetKey() instanceof AudioKey)) { | |
323 | - throw new IllegalArgumentException("Asset is not a AudioKey"); | |
324 | - } | |
325 | - | |
326 | - AudioKey assetKey = (AudioKey) audioData.getAssetKey(); | |
327 | - | |
328 | - try { | |
329 | - | |
330 | - if (audioData.getId() < 0) { // found something to load | |
331 | - int soundId = soundPool.load( | |
332 | - assetManager.openFd(assetKey.getName()), 1); | |
333 | - audioData.setId(soundId); | |
334 | - } | |
335 | - | |
336 | - int channel = soundPool.play(audioData.getId(), 1f, 1f, 1, 0, 1f); | |
337 | - | |
338 | - if (channel == 0) { | |
339 | - soundpoolStillLoading.put(audioData.getId(), src); | |
340 | - } else { | |
341 | - if (src.getStatus() != Status.Stopped) { | |
342 | - soundPool.stop(channel); | |
343 | - src.setStatus(Status.Stopped); | |
344 | - } | |
345 | - src.setChannel(channel); // receive a channel at the last | |
346 | - setSourceParams(src); | |
347 | - // playing at least | |
348 | - | |
349 | - | |
350 | - } | |
351 | - } catch (IOException e) { | |
352 | - logger.log(Level.SEVERE, | |
353 | - "Failed to load sound " + assetKey.getName(), e); | |
354 | - audioData.setId(-1); | |
355 | - } | |
356 | - } | |
357 | - | |
358 | - @Override | |
359 | - public void onLoadComplete(SoundPool soundPool, int sampleId, int status) { | |
360 | - AudioSource src = soundpoolStillLoading.remove(sampleId); | |
361 | - | |
362 | - if (src == null) { | |
363 | - logger.warning("Something went terribly wrong! onLoadComplete" | |
364 | - + " had sampleId which was not in the HashMap of loading items"); | |
365 | - return; | |
366 | - } | |
367 | - | |
368 | - AudioData audioData = src.getAudioData(); | |
369 | - | |
370 | - // load was successfull | |
371 | - if (status == 0) { | |
372 | - int channelIndex; | |
373 | - channelIndex = soundPool.play(audioData.getId(), 1f, 1f, 1, 0, 1f); | |
374 | - src.setChannel(channelIndex); | |
375 | - setSourceParams(src); | |
376 | - } | |
377 | - } | |
378 | - | |
379 | - public void playSource(AudioSource src) { | |
380 | - if (audioDisabled) { | |
381 | - return; | |
382 | - } | |
383 | - | |
384 | - AndroidAudioData audioData = (AndroidAudioData) src.getAudioData(); | |
385 | - | |
386 | - MediaPlayer mp = musicPlaying.get(src); | |
387 | - if (mp == null) { | |
388 | - mp = new MediaPlayer(); | |
389 | - mp.setOnCompletionListener(this); | |
390 | - mp.setAudioStreamType(AudioManager.STREAM_MUSIC); | |
391 | - } | |
392 | - | |
393 | - try { | |
394 | - if (src.getStatus() == Status.Stopped) { | |
395 | - mp.reset(); | |
396 | - AssetKey<?> key = audioData.getAssetKey(); | |
397 | - | |
398 | - AssetFileDescriptor afd = assetManager.openFd(key.getName()); // assetKey.getName() | |
399 | - mp.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), | |
400 | - afd.getLength()); | |
401 | - mp.prepare(); | |
402 | - setSourceParams(src, mp); | |
403 | - src.setChannel(0); | |
404 | - src.setStatus(Status.Playing); | |
405 | - musicPlaying.put(src, mp); | |
406 | - mp.start(); | |
407 | - } else { | |
408 | - mp.start(); | |
409 | - } | |
410 | - } catch (IllegalStateException e) { | |
411 | - e.printStackTrace(); | |
412 | - } catch (Exception e) { | |
413 | - e.printStackTrace(); | |
414 | - } | |
415 | - } | |
416 | - | |
417 | - private void setSourceParams(AudioSource src, MediaPlayer mp) { | |
418 | - mp.setLooping(src.isLooping()); | |
419 | - mp.setVolume(src.getVolume(), src.getVolume()); | |
420 | - //src.getDryFilter(); | |
421 | - } | |
422 | - | |
423 | - private void setSourceParams(AudioSource src) { | |
424 | - soundPool.setLoop(src.getChannel(), src.isLooping() ? -1 : 0); | |
425 | - soundPool.setVolume(src.getChannel(), src.getVolume(), src.getVolume()); | |
426 | - } | |
17 | + public void pauseAll(); | |
427 | 18 | |
428 | 19 | /** |
429 | - * Pause the current playing sounds. Both from the {@link SoundPool} and the | |
430 | - * active {@link MediaPlayer}s | |
20 | + * Resumes all Paused audio. To be used when the app is brought back to | |
21 | + * the foreground. | |
431 | 22 | */ |
432 | - public void pauseAll() { | |
433 | - if (soundPool != null) { | |
434 | - soundPool.autoPause(); | |
435 | - for (MediaPlayer mp : musicPlaying.values()) { | |
436 | - if(mp.isPlaying()){ | |
437 | - mp.pause(); | |
438 | - } | |
439 | - } | |
440 | - } | |
441 | - } | |
442 | - | |
443 | - /** | |
444 | - * Resume all paused sounds. | |
445 | - */ | |
446 | - public void resumeAll() { | |
447 | - if (soundPool != null) { | |
448 | - soundPool.autoResume(); | |
449 | - for (MediaPlayer mp : musicPlaying.values()) { | |
450 | - mp.start(); //no resume -> api says call start to resume | |
451 | - } | |
452 | - } | |
453 | - } | |
454 | - | |
455 | - public void pauseSource(AudioSource src) { | |
456 | - if (audioDisabled) { | |
457 | - return; | |
458 | - } | |
459 | - | |
460 | - MediaPlayer mp = musicPlaying.get(src); | |
461 | - if (mp != null) { | |
462 | - mp.pause(); | |
463 | - src.setStatus(Status.Paused); | |
464 | - } else { | |
465 | - int channel = src.getChannel(); | |
466 | - if (channel != -1) { | |
467 | - soundPool.pause(channel); // is not very likley to make | |
468 | - } // something useful :) | |
469 | - } | |
470 | - } | |
471 | - | |
472 | - public void stopSource(AudioSource src) { | |
473 | - if (audioDisabled) { | |
474 | - return; | |
475 | - } | |
476 | - | |
477 | - // can be stream or buffer -> so try to get mediaplayer | |
478 | - // if there is non try to stop soundpool | |
479 | - MediaPlayer mp = musicPlaying.get(src); | |
480 | - if (mp != null) { | |
481 | - mp.stop(); | |
482 | - mp.reset(); | |
483 | - src.setStatus(Status.Stopped); | |
484 | - } else { | |
485 | - int channel = src.getChannel(); | |
486 | - if (channel != -1) { | |
487 | - soundPool.pause(channel); // is not very likley to make | |
488 | - // something useful :) | |
489 | - } | |
490 | - } | |
491 | - | |
492 | - } | |
493 | - | |
494 | - @Override | |
495 | - public void deleteAudioData(AudioData ad) { | |
496 | - | |
497 | - for (AudioSource src : musicPlaying.keySet()) { | |
498 | - if (src.getAudioData() == ad) { | |
499 | - MediaPlayer mp = musicPlaying.remove(src); | |
500 | - mp.stop(); | |
501 | - mp.release(); | |
502 | - src.setStatus(Status.Stopped); | |
503 | - src.setChannel(-1); | |
504 | - ad.setId(-1); | |
505 | - break; | |
506 | - } | |
507 | - } | |
508 | - | |
509 | - if (ad.getId() > 0) { | |
510 | - soundPool.unload(ad.getId()); | |
511 | - ad.setId(-1); | |
512 | - } | |
513 | - } | |
514 | - | |
515 | - @Override | |
516 | - public void setEnvironment(Environment env) { | |
517 | - // not yet supported | |
518 | - } | |
519 | - | |
520 | - @Override | |
521 | - public void deleteFilter(Filter filter) { | |
522 | - } | |
23 | + public void resumeAll(); | |
523 | 24 | } |
@@ -0,0 +1,523 @@ | ||
1 | +/* | |
2 | + * Copyright (c) 2009-2012 jMonkeyEngine | |
3 | + * All rights reserved. | |
4 | + * | |
5 | + * Redistribution and use in source and binary forms, with or without | |
6 | + * modification, are permitted provided that the following conditions are | |
7 | + * met: | |
8 | + * | |
9 | + * * Redistributions of source code must retain the above copyright | |
10 | + * notice, this list of conditions and the following disclaimer. | |
11 | + * | |
12 | + * * Redistributions in binary form must reproduce the above copyright | |
13 | + * notice, this list of conditions and the following disclaimer in the | |
14 | + * documentation and/or other materials provided with the distribution. | |
15 | + * | |
16 | + * * Neither the name of 'jMonkeyEngine' nor the names of its contributors | |
17 | + * may be used to endorse or promote products derived from this software | |
18 | + * without specific prior written permission. | |
19 | + * | |
20 | + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | |
21 | + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED | |
22 | + * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | |
23 | + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR | |
24 | + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, | |
25 | + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | |
26 | + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR | |
27 | + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF | |
28 | + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING | |
29 | + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS | |
30 | + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
31 | + */ | |
32 | +package com.jme3.audio.android; | |
33 | + | |
34 | +import android.app.Activity; | |
35 | +import android.content.Context; | |
36 | +import android.content.res.AssetFileDescriptor; | |
37 | +import android.content.res.AssetManager; | |
38 | +import android.media.AudioManager; | |
39 | +import android.media.MediaPlayer; | |
40 | +import android.media.SoundPool; | |
41 | +import com.jme3.asset.AssetKey; | |
42 | +import com.jme3.audio.*; | |
43 | +import com.jme3.audio.AudioSource.Status; | |
44 | +import com.jme3.math.FastMath; | |
45 | +import com.jme3.math.Vector3f; | |
46 | +import java.io.IOException; | |
47 | +import java.util.HashMap; | |
48 | +import java.util.logging.Level; | |
49 | +import java.util.logging.Logger; | |
50 | + | |
51 | +/** | |
52 | + * This class is the android implementation for {@link AudioRenderer} | |
53 | + * | |
54 | + * @author larynx | |
55 | + * @author plan_rich | |
56 | + */ | |
57 | +public class AndroidMediaPlayerAudioRenderer implements AndroidAudioRenderer, | |
58 | + SoundPool.OnLoadCompleteListener, MediaPlayer.OnCompletionListener { | |
59 | + | |
60 | + private static final Logger logger = Logger.getLogger(AndroidMediaPlayerAudioRenderer.class.getName()); | |
61 | + private final static int MAX_NUM_CHANNELS = 16; | |
62 | + private final HashMap<AudioSource, MediaPlayer> musicPlaying = new HashMap<AudioSource, MediaPlayer>(); | |
63 | + private SoundPool soundPool = null; | |
64 | + private final Vector3f listenerPosition = new Vector3f(); | |
65 | + // For temp use | |
66 | + private final Vector3f distanceVector = new Vector3f(); | |
67 | + private final AssetManager assetManager; | |
68 | + private HashMap<Integer, AudioSource> soundpoolStillLoading = new HashMap<Integer, AudioSource>(); | |
69 | + private Listener listener; | |
70 | + private boolean audioDisabled = false; | |
71 | + private final AudioManager manager; | |
72 | + | |
73 | + public AndroidMediaPlayerAudioRenderer(Activity context) { | |
74 | + manager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); | |
75 | + context.setVolumeControlStream(AudioManager.STREAM_MUSIC); | |
76 | + assetManager = context.getAssets(); | |
77 | + } | |
78 | + | |
79 | + @Override | |
80 | + public void initialize() { | |
81 | + soundPool = new SoundPool(MAX_NUM_CHANNELS, AudioManager.STREAM_MUSIC, | |
82 | + 0); | |
83 | + soundPool.setOnLoadCompleteListener(this); | |
84 | + } | |
85 | + | |
86 | + @Override | |
87 | + public void updateSourceParam(AudioSource src, AudioParam param) { | |
88 | + if (audioDisabled) { | |
89 | + return; | |
90 | + } | |
91 | + | |
92 | + if (src.getChannel() < 0) { | |
93 | + return; | |
94 | + } | |
95 | + | |
96 | + switch (param) { | |
97 | + case Position: | |
98 | + if (!src.isPositional()) { | |
99 | + return; | |
100 | + } | |
101 | + | |
102 | + Vector3f pos = src.getPosition(); | |
103 | + break; | |
104 | + case Velocity: | |
105 | + if (!src.isPositional()) { | |
106 | + return; | |
107 | + } | |
108 | + | |
109 | + Vector3f vel = src.getVelocity(); | |
110 | + break; | |
111 | + case MaxDistance: | |
112 | + if (!src.isPositional()) { | |
113 | + return; | |
114 | + } | |
115 | + break; | |
116 | + case RefDistance: | |
117 | + if (!src.isPositional()) { | |
118 | + return; | |
119 | + } | |
120 | + break; | |
121 | + case ReverbFilter: | |
122 | + if (!src.isPositional() || !src.isReverbEnabled()) { | |
123 | + return; | |
124 | + } | |
125 | + break; | |
126 | + case ReverbEnabled: | |
127 | + if (!src.isPositional()) { | |
128 | + return; | |
129 | + } | |
130 | + | |
131 | + if (src.isReverbEnabled()) { | |
132 | + updateSourceParam(src, AudioParam.ReverbFilter); | |
133 | + } | |
134 | + break; | |
135 | + case IsPositional: | |
136 | + break; | |
137 | + case Direction: | |
138 | + if (!src.isDirectional()) { | |
139 | + return; | |
140 | + } | |
141 | + | |
142 | + Vector3f dir = src.getDirection(); | |
143 | + break; | |
144 | + case InnerAngle: | |
145 | + if (!src.isDirectional()) { | |
146 | + return; | |
147 | + } | |
148 | + break; | |
149 | + case OuterAngle: | |
150 | + if (!src.isDirectional()) { | |
151 | + return; | |
152 | + } | |
153 | + break; | |
154 | + case IsDirectional: | |
155 | + if (src.isDirectional()) { | |
156 | + updateSourceParam(src, AudioParam.Direction); | |
157 | + updateSourceParam(src, AudioParam.InnerAngle); | |
158 | + updateSourceParam(src, AudioParam.OuterAngle); | |
159 | + } else { | |
160 | + } | |
161 | + break; | |
162 | + case DryFilter: | |
163 | + if (src.getDryFilter() != null) { | |
164 | + Filter f = src.getDryFilter(); | |
165 | + if (f.isUpdateNeeded()) { | |
166 | + // updateFilter(f); | |
167 | + } | |
168 | + } | |
169 | + break; | |
170 | + case Looping: | |
171 | + if (src.isLooping()) { | |
172 | + } | |
173 | + break; | |
174 | + case Volume: | |
175 | + MediaPlayer mp = musicPlaying.get(src); | |
176 | + if (mp != null) { | |
177 | + mp.setVolume(src.getVolume(), src.getVolume()); | |
178 | + } else { | |
179 | + soundPool.setVolume(src.getChannel(), src.getVolume(), | |
180 | + src.getVolume()); | |
181 | + } | |
182 | + | |
183 | + break; | |
184 | + case Pitch: | |
185 | + | |
186 | + break; | |
187 | + } | |
188 | + | |
189 | + } | |
190 | + | |
191 | + @Override | |
192 | + public void updateListenerParam(Listener listener, ListenerParam param) { | |
193 | + if (audioDisabled) { | |
194 | + return; | |
195 | + } | |
196 | + | |
197 | + switch (param) { | |
198 | + case Position: | |
199 | + listenerPosition.set(listener.getLocation()); | |
200 | + break; | |
201 | + case Rotation: | |
202 | + Vector3f dir = listener.getDirection(); | |
203 | + Vector3f up = listener.getUp(); | |
204 | + | |
205 | + break; | |
206 | + case Velocity: | |
207 | + Vector3f vel = listener.getVelocity(); | |
208 | + | |
209 | + break; | |
210 | + case Volume: | |
211 | + // alListenerf(AL_GAIN, listener.getVolume()); | |
212 | + break; | |
213 | + } | |
214 | + | |
215 | + } | |
216 | + | |
217 | + @Override | |
218 | + public void update(float tpf) { | |
219 | + float distance; | |
220 | + float volume; | |
221 | + | |
222 | + // Loop over all mediaplayers | |
223 | + for (AudioSource src : musicPlaying.keySet()) { | |
224 | + | |
225 | + MediaPlayer mp = musicPlaying.get(src); | |
226 | + | |
227 | + // Calc the distance to the listener | |
228 | + distanceVector.set(listenerPosition); | |
229 | + distanceVector.subtractLocal(src.getPosition()); | |
230 | + distance = FastMath.abs(distanceVector.length()); | |
231 | + | |
232 | + if (distance < src.getRefDistance()) { | |
233 | + distance = src.getRefDistance(); | |
234 | + } | |
235 | + if (distance > src.getMaxDistance()) { | |
236 | + distance = src.getMaxDistance(); | |
237 | + } | |
238 | + volume = src.getRefDistance() / distance; | |
239 | + | |
240 | + AndroidAudioData audioData = (AndroidAudioData) src.getAudioData(); | |
241 | + | |
242 | + if (FastMath.abs(audioData.getCurrentVolume() - volume) > FastMath.FLT_EPSILON) { | |
243 | + // Left / Right channel get the same volume by now, only | |
244 | + // positional | |
245 | + mp.setVolume(volume, volume); | |
246 | + | |
247 | + audioData.setCurrentVolume(volume); | |
248 | + } | |
249 | + | |
250 | + } | |
251 | + } | |
252 | + | |
253 | + public void setListener(Listener listener) { | |
254 | + if (audioDisabled) { | |
255 | + return; | |
256 | + } | |
257 | + | |
258 | + if (this.listener != null) { | |
259 | + // previous listener no longer associated with current | |
260 | + // renderer | |
261 | + this.listener.setRenderer(null); | |
262 | + } | |
263 | + | |
264 | + this.listener = listener; | |
265 | + this.listener.setRenderer(this); | |
266 | + | |
267 | + } | |
268 | + | |
269 | + @Override | |
270 | + public void cleanup() { | |
271 | + // Cleanup sound pool | |
272 | + if (soundPool != null) { | |
273 | + soundPool.release(); | |
274 | + soundPool = null; | |
275 | + } | |
276 | + | |
277 | + // Cleanup media player | |
278 | + for (AudioSource src : musicPlaying.keySet()) { | |
279 | + MediaPlayer mp = musicPlaying.get(src); | |
280 | + { | |
281 | + mp.stop(); | |
282 | + mp.release(); | |
283 | + src.setStatus(Status.Stopped); | |
284 | + } | |
285 | + } | |
286 | + musicPlaying.clear(); | |
287 | + } | |
288 | + | |
289 | + @Override | |
290 | + public void onCompletion(MediaPlayer mp) { | |
291 | + if (mp.isPlaying()) { | |
292 | + mp.seekTo(0); | |
293 | + mp.stop(); | |
294 | + } | |
295 | + // XXX: This has bad performance -> maybe change overall structure of | |
296 | + // mediaplayer in this audiorenderer? | |
297 | + for (AudioSource src : musicPlaying.keySet()) { | |
298 | + if (musicPlaying.get(src) == mp) { | |
299 | + src.setStatus(Status.Stopped); | |
300 | + break; | |
301 | + } | |
302 | + } | |
303 | + | |
304 | + } | |
305 | + | |
306 | + /** | |
307 | + * Plays using the {@link SoundPool} of Android. Due to hard limitation of | |
308 | + * the SoundPool: After playing more instances of the sound you only have | |
309 | + * the channel of the last played instance. | |
310 | + * | |
311 | + * It is not possible to get information about the state of the soundpool of | |
312 | + * a specific streamid, so removing is not possilbe -> noone knows when | |
313 | + * sound finished. | |
314 | + */ | |
315 | + public void playSourceInstance(AudioSource src) { | |
316 | + if (audioDisabled) { | |
317 | + return; | |
318 | + } | |
319 | + | |
320 | + AndroidAudioData audioData = (AndroidAudioData) src.getAudioData(); | |
321 | + | |
322 | + if (!(audioData.getAssetKey() instanceof AudioKey)) { | |
323 | + throw new IllegalArgumentException("Asset is not a AudioKey"); | |
324 | + } | |
325 | + | |
326 | + AudioKey assetKey = (AudioKey) audioData.getAssetKey(); | |
327 | + | |
328 | + try { | |
329 | + | |
330 | + if (audioData.getId() < 0) { // found something to load | |
331 | + int soundId = soundPool.load( | |
332 | + assetManager.openFd(assetKey.getName()), 1); | |
333 | + audioData.setId(soundId); | |
334 | + } | |
335 | + | |
336 | + int channel = soundPool.play(audioData.getId(), 1f, 1f, 1, 0, 1f); | |
337 | + | |
338 | + if (channel == 0) { | |
339 | + soundpoolStillLoading.put(audioData.getId(), src); | |
340 | + } else { | |
341 | + if (src.getStatus() != Status.Stopped) { | |
342 | + soundPool.stop(channel); | |
343 | + src.setStatus(Status.Stopped); | |
344 | + } | |
345 | + src.setChannel(channel); // receive a channel at the last | |
346 | + setSourceParams(src); | |
347 | + // playing at least | |
348 | + | |
349 | + | |
350 | + } | |
351 | + } catch (IOException e) { | |
352 | + logger.log(Level.SEVERE, | |
353 | + "Failed to load sound " + assetKey.getName(), e); | |
354 | + audioData.setId(-1); | |
355 | + } | |
356 | + } | |
357 | + | |
358 | + @Override | |
359 | + public void onLoadComplete(SoundPool soundPool, int sampleId, int status) { | |
360 | + AudioSource src = soundpoolStillLoading.remove(sampleId); | |
361 | + | |
362 | + if (src == null) { | |
363 | + logger.warning("Something went terribly wrong! onLoadComplete" | |
364 | + + " had sampleId which was not in the HashMap of loading items"); | |
365 | + return; | |
366 | + } | |
367 | + | |
368 | + AudioData audioData = src.getAudioData(); | |
369 | + | |
370 | + // load was successfull | |
371 | + if (status == 0) { | |
372 | + int channelIndex; | |
373 | + channelIndex = soundPool.play(audioData.getId(), 1f, 1f, 1, 0, 1f); | |
374 | + src.setChannel(channelIndex); | |
375 | + setSourceParams(src); | |
376 | + } | |
377 | + } | |
378 | + | |
379 | + public void playSource(AudioSource src) { | |
380 | + if (audioDisabled) { | |
381 | + return; | |
382 | + } | |
383 | + | |
384 | + AndroidAudioData audioData = (AndroidAudioData) src.getAudioData(); | |
385 | + | |
386 | + MediaPlayer mp = musicPlaying.get(src); | |
387 | + if (mp == null) { | |
388 | + mp = new MediaPlayer(); | |
389 | + mp.setOnCompletionListener(this); | |
390 | + mp.setAudioStreamType(AudioManager.STREAM_MUSIC); | |
391 | + } | |
392 | + | |
393 | + try { | |
394 | + if (src.getStatus() == Status.Stopped) { | |
395 | + mp.reset(); | |
396 | + AssetKey<?> key = audioData.getAssetKey(); | |
397 | + | |
398 | + AssetFileDescriptor afd = assetManager.openFd(key.getName()); // assetKey.getName() | |
399 | + mp.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), | |
400 | + afd.getLength()); | |
401 | + mp.prepare(); | |
402 | + setSourceParams(src, mp); | |
403 | + src.setChannel(0); | |
404 | + src.setStatus(Status.Playing); | |
405 | + musicPlaying.put(src, mp); | |
406 | + mp.start(); | |
407 | + } else { | |
408 | + mp.start(); | |
409 | + } | |
410 | + } catch (IllegalStateException e) { | |
411 | + e.printStackTrace(); | |
412 | + } catch (Exception e) { | |
413 | + e.printStackTrace(); | |
414 | + } | |
415 | + } | |
416 | + | |
417 | + private void setSourceParams(AudioSource src, MediaPlayer mp) { | |
418 | + mp.setLooping(src.isLooping()); | |
419 | + mp.setVolume(src.getVolume(), src.getVolume()); | |
420 | + //src.getDryFilter(); | |
421 | + } | |
422 | + | |
423 | + private void setSourceParams(AudioSource src) { | |
424 | + soundPool.setLoop(src.getChannel(), src.isLooping() ? -1 : 0); | |
425 | + soundPool.setVolume(src.getChannel(), src.getVolume(), src.getVolume()); | |
426 | + } | |
427 | + | |
428 | + /** | |
429 | + * Pause the current playing sounds. Both from the {@link SoundPool} and the | |
430 | + * active {@link MediaPlayer}s | |
431 | + */ | |
432 | + public void pauseAll() { | |
433 | + if (soundPool != null) { | |
434 | + soundPool.autoPause(); | |
435 | + for (MediaPlayer mp : musicPlaying.values()) { | |
436 | + if(mp.isPlaying()){ | |
437 | + mp.pause(); | |
438 | + } | |
439 | + } | |
440 | + } | |
441 | + } | |
442 | + | |
443 | + /** | |
444 | + * Resume all paused sounds. | |
445 | + */ | |
446 | + public void resumeAll() { | |
447 | + if (soundPool != null) { | |
448 | + soundPool.autoResume(); | |
449 | + for (MediaPlayer mp : musicPlaying.values()) { | |
450 | + mp.start(); //no resume -> api says call start to resume | |
451 | + } | |
452 | + } | |
453 | + } | |
454 | + | |
455 | + public void pauseSource(AudioSource src) { | |
456 | + if (audioDisabled) { | |
457 | + return; | |
458 | + } | |
459 | + | |
460 | + MediaPlayer mp = musicPlaying.get(src); | |
461 | + if (mp != null) { | |
462 | + mp.pause(); | |
463 | + src.setStatus(Status.Paused); | |
464 | + } else { | |
465 | + int channel = src.getChannel(); | |
466 | + if (channel != -1) { | |
467 | + soundPool.pause(channel); // is not very likley to make | |
468 | + } // something useful :) | |
469 | + } | |
470 | + } | |
471 | + | |
472 | + public void stopSource(AudioSource src) { | |
473 | + if (audioDisabled) { | |
474 | + return; | |
475 | + } | |
476 | + | |
477 | + // can be stream or buffer -> so try to get mediaplayer | |
478 | + // if there is non try to stop soundpool | |
479 | + MediaPlayer mp = musicPlaying.get(src); | |
480 | + if (mp != null) { | |
481 | + mp.stop(); | |
482 | + mp.reset(); | |
483 | + src.setStatus(Status.Stopped); | |
484 | + } else { | |
485 | + int channel = src.getChannel(); | |
486 | + if (channel != -1) { | |
487 | + soundPool.pause(channel); // is not very likley to make | |
488 | + // something useful :) | |
489 | + } | |
490 | + } | |
491 | + | |
492 | + } | |
493 | + | |
494 | + @Override | |
495 | + public void deleteAudioData(AudioData ad) { | |
496 | + | |
497 | + for (AudioSource src : musicPlaying.keySet()) { | |
498 | + if (src.getAudioData() == ad) { | |
499 | + MediaPlayer mp = musicPlaying.remove(src); | |
500 | + mp.stop(); | |
501 | + mp.release(); | |
502 | + src.setStatus(Status.Stopped); | |
503 | + src.setChannel(-1); | |
504 | + ad.setId(-1); | |
505 | + break; | |
506 | + } | |
507 | + } | |
508 | + | |
509 | + if (ad.getId() > 0) { | |
510 | + soundPool.unload(ad.getId()); | |
511 | + ad.setId(-1); | |
512 | + } | |
513 | + } | |
514 | + | |
515 | + @Override | |
516 | + public void setEnvironment(Environment env) { | |
517 | + // not yet supported | |
518 | + } | |
519 | + | |
520 | + @Override | |
521 | + public void deleteFilter(Filter filter) { | |
522 | + } | |
523 | +} |
@@ -44,7 +44,7 @@ import java.util.concurrent.atomic.AtomicBoolean; | ||
44 | 44 | import java.util.logging.Level; |
45 | 45 | import java.util.logging.Logger; |
46 | 46 | |
47 | -public class AndroidOpenALSoftAudioRenderer implements AudioRenderer, Runnable { | |
47 | +public class AndroidOpenALSoftAudioRenderer implements AndroidAudioRenderer, Runnable { | |
48 | 48 | |
49 | 49 | private static final Logger logger = Logger.getLogger(AndroidOpenALSoftAudioRenderer.class.getName()); |
50 | 50 | private final NativeObjectManager objManager = new NativeObjectManager(); |
@@ -10,25 +10,25 @@ import com.jme3.asset.AndroidImageInfo; | ||
10 | 10 | import com.jme3.asset.AssetManager; |
11 | 11 | import com.jme3.audio.AudioRenderer; |
12 | 12 | import com.jme3.audio.android.AndroidAudioRenderer; |
13 | +import com.jme3.audio.android.AndroidMediaPlayerAudioRenderer; | |
14 | +import com.jme3.audio.android.AndroidOpenALSoftAudioRenderer; | |
13 | 15 | import com.jme3.system.*; |
14 | 16 | import com.jme3.system.JmeContext.Type; |
15 | 17 | import com.jme3.texture.Image; |
16 | 18 | import com.jme3.texture.image.DefaultImageRaster; |
17 | 19 | import com.jme3.texture.image.ImageRaster; |
18 | 20 | import com.jme3.util.AndroidScreenshots; |
19 | -import com.jme3.util.JmeFormatter; | |
20 | 21 | import java.io.File; |
21 | 22 | import java.io.IOException; |
22 | 23 | import java.io.OutputStream; |
23 | 24 | import java.net.URL; |
24 | 25 | import java.nio.ByteBuffer; |
25 | -import java.util.logging.Handler; | |
26 | 26 | import java.util.logging.Level; |
27 | -import java.util.logging.Logger; | |
28 | 27 | |
29 | 28 | public class JmeAndroidSystem extends JmeSystemDelegate { |
30 | 29 | |
31 | 30 | private static Activity activity; |
31 | + private static String audioRendererType = AppSettings.ANDROID_MEDIAPLAYER; | |
32 | 32 | |
33 | 33 | static { |
34 | 34 | try { |
@@ -97,6 +97,16 @@ public class JmeAndroidSystem extends JmeSystemDelegate { | ||
97 | 97 | |
98 | 98 | @Override |
99 | 99 | public JmeContext newContext(AppSettings settings, Type contextType) { |
100 | + if (settings.getAudioRenderer().equals(AppSettings.ANDROID_MEDIAPLAYER)) { | |
101 | + logger.log(Level.INFO, "newContext settings set to Android MediaPlayer / SoundPool"); | |
102 | + audioRendererType = AppSettings.ANDROID_MEDIAPLAYER; | |
103 | + } else if (settings.getAudioRenderer().equals(AppSettings.ANDROID_OPENAL_SOFT)) { | |
104 | + logger.log(Level.INFO, "newContext settings set to Android OpenAL Soft"); | |
105 | + audioRendererType = AppSettings.ANDROID_OPENAL_SOFT; | |
106 | + } else { | |
107 | + logger.log(Level.INFO, "AudioRenderer not set. Defaulting to Android MediaPlayer / SoundPool"); | |
108 | + audioRendererType = AppSettings.ANDROID_MEDIAPLAYER; | |
109 | + } | |
100 | 110 | initialize(settings); |
101 | 111 | JmeContext ctx = new OGLESContext(); |
102 | 112 | ctx.setSettings(settings); |
@@ -105,7 +115,20 @@ public class JmeAndroidSystem extends JmeSystemDelegate { | ||
105 | 115 | |
106 | 116 | @Override |
107 | 117 | public AudioRenderer newAudioRenderer(AppSettings settings) { |
108 | - return new AndroidAudioRenderer(activity); | |
118 | + | |
119 | + if (settings.getAudioRenderer().equals(AppSettings.ANDROID_MEDIAPLAYER)) { | |
120 | + logger.log(Level.INFO, "newAudioRenderer settings set to Android MediaPlayer / SoundPool"); | |
121 | + audioRendererType = AppSettings.ANDROID_MEDIAPLAYER; | |
122 | + return new AndroidMediaPlayerAudioRenderer(activity); | |
123 | + } else if (settings.getAudioRenderer().equals(AppSettings.ANDROID_OPENAL_SOFT)) { | |
124 | + logger.log(Level.INFO, "newAudioRenderer settings set to Android OpenAL Soft"); | |
125 | + audioRendererType = AppSettings.ANDROID_OPENAL_SOFT; | |
126 | + return new AndroidOpenALSoftAudioRenderer(); | |
127 | + } else { | |
128 | + logger.log(Level.INFO, "AudioRenderer not set. Defaulting to Android MediaPlayer / SoundPool"); | |
129 | + audioRendererType = AppSettings.ANDROID_MEDIAPLAYER; | |
130 | + return new AndroidMediaPlayerAudioRenderer(activity); | |
131 | + } | |
109 | 132 | } |
110 | 133 | |
111 | 134 | @Override |
@@ -198,4 +221,8 @@ public class JmeAndroidSystem extends JmeSystemDelegate { | ||
198 | 221 | public static Activity getActivity() { |
199 | 222 | return activity; |
200 | 223 | } |
224 | + | |
225 | + public static String getAudioRendererType() { | |
226 | + return audioRendererType; | |
227 | + } | |
201 | 228 | } |
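
A short sketch (not part of the commit) of how other Android-side code can query the selected renderer through the accessor added above, mirroring the branch `AndroidAssetManager` performs when registering audio loaders. The class name is hypothetical; the called methods and constants come from this change.

```java
import com.jme3.system.AppSettings;
import com.jme3.system.android.JmeAndroidSystem;

public class AudioRendererTypeCheck {

    /** Returns true when the OpenAL Soft renderer was selected via AppSettings. */
    public static boolean isOpenALSoft() {
        // getAudioRendererType() is the static accessor introduced in this commit;
        // it stays at AppSettings.ANDROID_MEDIAPLAYER until newContext() or
        // newAudioRenderer() has read the application's AppSettings.
        return AppSettings.ANDROID_OPENAL_SOFT.equals(JmeAndroidSystem.getAudioRendererType());
    }
}
```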
@@ -42,69 +42,88 @@ import java.util.prefs.Preferences; | ||
42 | 42 | |
43 | 43 | /** |
44 | 44 | * <code>AppSettings</code> provides a store of configuration |
45 | - * to be used by the application. | |
45 | + * to be used by the application. | |
46 | 46 | * <p> |
47 | 47 | * By default only the {@link JmeContext context} uses the configuration, |
48 | - * however the user may set and retrieve the settings as well. | |
49 | - * The settings can be stored either in the Java preferences | |
48 | + * however the user may set and retrieve the settings as well. | |
49 | + * The settings can be stored either in the Java preferences | |
50 | 50 | * (using {@link #save(java.lang.String) } or |
51 | 51 | * a .properties file (using {@link #save(java.io.OutputStream) }. |
52 | - * | |
52 | + * | |
53 | 53 | * @author Kirill Vainer |
54 | 54 | */ |
55 | 55 | public final class AppSettings extends HashMap<String, Object> { |
56 | 56 | |
57 | 57 | private static final AppSettings defaults = new AppSettings(false); |
58 | - | |
58 | + | |
59 | 59 | /** |
60 | 60 | * Use LWJGL as the display system and force using the OpenGL1.1 renderer. |
61 | - * | |
62 | - * @see AppSettings#setRenderer(java.lang.String) | |
61 | + * | |
62 | + * @see AppSettings#setRenderer(java.lang.String) | |
63 | 63 | */ |
64 | 64 | public static final String LWJGL_OPENGL1 = "LWJGL-OPENGL1"; |
65 | - | |
65 | + | |
66 | 66 | /** |
67 | 67 | * Use LWJGL as the display system and force using the OpenGL2.0 renderer. |
68 | 68 | * <p> |
69 | 69 | * If the underlying system does not support OpenGL2.0, then the context |
70 | 70 | * initialization will throw an exception. |
71 | - * | |
72 | - * @see AppSettings#setRenderer(java.lang.String) | |
71 | + * | |
72 | + * @see AppSettings#setRenderer(java.lang.String) | |
73 | 73 | */ |
74 | 74 | public static final String LWJGL_OPENGL2 = "LWJGL-OpenGL2"; |
75 | - | |
75 | + | |
76 | 76 | /** |
77 | 77 | * Use LWJGL as the display system and force using the core OpenGL3.3 renderer. |
78 | 78 | * <p> |
79 | 79 | * If the underlying system does not support OpenGL3.2, then the context |
80 | 80 | * initialization will throw an exception. Note that currently jMonkeyEngine |
81 | - * does not have any shaders that support OpenGL3.2 therefore this | |
81 | + * does not have any shaders that support OpenGL3.2 therefore this | |
82 | 82 | * option is not useful. |
83 | 83 | * <p> |
84 | 84 | * Note: OpenGL 3.2 is used to give 3.x support to Mac users. |
85 | - * | |
86 | - * @see AppSettings#setRenderer(java.lang.String) | |
85 | + * | |
86 | + * @see AppSettings#setRenderer(java.lang.String) | |
87 | 87 | */ |
88 | 88 | public static final String LWJGL_OPENGL3 = "LWJGL-OpenGL3"; |
89 | - | |
89 | + | |
90 | 90 | /** |
91 | - * Use LWJGL as the display system and allow the context | |
91 | + * Use LWJGL as the display system and allow the context | |
92 | 92 | * to choose an appropriate renderer based on system capabilities. |
93 | 93 | * <p> |
94 | 94 | * If the GPU supports OpenGL2 or later, then the OpenGL2.0 renderer will |
95 | 95 | * be used, otherwise, the OpenGL1.1 renderer is used. |
96 | - * | |
97 | - * @see AppSettings#setRenderer(java.lang.String) | |
96 | + * | |
97 | + * @see AppSettings#setRenderer(java.lang.String) | |
98 | 98 | */ |
99 | 99 | public static final String LWJGL_OPENGL_ANY = "LWJGL-OpenGL-Any"; |
100 | - | |
100 | + | |
101 | 101 | /** |
102 | 102 | * Use the LWJGL OpenAL based renderer for audio capabilities. |
103 | - * | |
104 | - * @see AppSettings#setAudioRenderer(java.lang.String) | |
103 | + * | |
104 | + * @see AppSettings#setAudioRenderer(java.lang.String) | |
105 | 105 | */ |
106 | 106 | public static final String LWJGL_OPENAL = "LWJGL"; |
107 | 107 | |
108 | + /** | |
109 | + * Use the Android MediaPlayer / SoundPool based renderer for Android audio capabilities. | |
110 | + * <p> | |
111 | + * NOTE: Supports Android 2.2+ platforms. This is the current default for | |
112 | + * Android platforms. | |
113 | + * | |
114 | + * @see AppSettings#setAudioRenderer(java.lang.String) | |
115 | + */ | |
116 | + public static final String ANDROID_MEDIAPLAYER = "MediaPlayer"; | |
117 | + | |
118 | + /** | |
119 | + * Use the OpenAL Soft based renderer for Android audio capabilities. | |
120 | + * <p> | |
121 | + * NOTE: Only to be used on Android 2.3+ platforms due to using OpenSL. | |
122 | + * | |
123 | + * @see AppSettings#setAudioRenderer(java.lang.String) | |
124 | + */ | |
125 | + public static final String ANDROID_OPENAL_SOFT = "OpenAL_SOFT"; | |
126 | + | |
108 | 127 | static { |
109 | 128 | defaults.put("Width", 640); |
110 | 129 | defaults.put("Height", 480); |
@@ -131,10 +150,10 @@ public final class AppSettings extends HashMap<String, Object> { | ||
131 | 150 | * Create a new instance of <code>AppSettings</code>. |
132 | 151 | * <p> |
133 | 152 | * If <code>loadDefaults</code> is true, then the default settings |
134 | - * will be set on the AppSettings. | |
153 | + * will be set on the AppSettings. | |
135 | 154 | * Use false if you want to change some settings but you would like the |
136 | 155 | * application to load settings from previous launches. |
137 | - * | |
156 | + * | |
138 | 157 | * @param loadDefaults If default settings are to be loaded. |
139 | 158 | */ |
140 | 159 | public AppSettings(boolean loadDefaults) { |
@@ -145,11 +164,11 @@ public final class AppSettings extends HashMap<String, Object> { | ||
145 | 164 | |
146 | 165 | /** |
147 | 166 | * Copies all settings from <code>other</code> to <code>this</code> |
148 | - * AppSettings. | |
167 | + * AppSettings. | |
149 | 168 | * <p> |
150 | 169 | * Any settings that are specified in other will overwrite settings |
151 | 170 | * set on this AppSettings. |
152 | - * | |
171 | + * | |
153 | 172 | * @param other The AppSettings to copy the settings from |
154 | 173 | */ |
155 | 174 | public void copyFrom(AppSettings other) { |
@@ -159,7 +178,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
159 | 178 | /** |
160 | 179 | * Same as {@link #copyFrom(com.jme3.system.AppSettings) }, except |
161 | 180 | * doesn't overwrite settings that are already set. |
162 | - * | |
181 | + * | |
163 | 182 | * @param other The AppSettings to merge the settings from |
164 | 183 | */ |
165 | 184 | public void mergeFrom(AppSettings other) { |
@@ -172,11 +191,11 @@ public final class AppSettings extends HashMap<String, Object> { | ||
172 | 191 | |
173 | 192 | /** |
174 | 193 | * Loads the settings from the given properties input stream. |
175 | - * | |
194 | + * | |
176 | 195 | * @param in The InputStream to load from |
177 | 196 | * @throws IOException If an IOException occurs |
178 | - * | |
179 | - * @see #save(java.io.OutputStream) | |
197 | + * | |
198 | + * @see #save(java.io.OutputStream) | |
180 | 199 | */ |
181 | 200 | public void load(InputStream in) throws IOException { |
182 | 201 | Properties props = new Properties(); |
@@ -207,11 +226,11 @@ public final class AppSettings extends HashMap<String, Object> { | ||
207 | 226 | |
208 | 227 | /** |
209 | 228 | * Saves all settings to the given properties output stream. |
210 | - * | |
229 | + * | |
211 | 230 | * @param out The OutputStream to write to |
212 | 231 | * @throws IOException If an IOException occurs |
213 | - * | |
214 | - * @see #load(java.io.InputStream) | |
232 | + * | |
233 | + * @see #load(java.io.InputStream) | |
215 | 234 | */ |
216 | 235 | public void save(OutputStream out) throws IOException { |
217 | 236 | Properties props = new Properties(); |
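A round-trip sketch for the stream-based load/save pair documented above; the file name is illustrative.

    import com.jme3.system.AppSettings;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;

    public class SettingsFileRoundTrip {
        public static void main(String[] args) throws Exception {
            AppSettings settings = new AppSettings(true);
            try (FileOutputStream out = new FileOutputStream("game-settings.properties")) {
                settings.save(out);   // written via java.util.Properties
            }

            AppSettings reloaded = new AppSettings(false);
            try (FileInputStream in = new FileInputStream("game-settings.properties")) {
                reloaded.load(in);    // restores the values saved above
            }
        }
    }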
@@ -238,11 +257,11 @@ public final class AppSettings extends HashMap<String, Object> { | ||
238 | 257 | |
239 | 258 | /** |
240 | 259 | * Loads settings previously saved in the Java preferences. |
241 | - * | |
260 | + * | |
242 | 261 | * @param preferencesKey The preferencesKey previously used to save the settings. |
243 | 262 | * @throws BackingStoreException If an exception occurs with the preferences |
244 | - * | |
245 | - * @see #save(java.lang.String) | |
263 | + * | |
264 | + * @see #save(java.lang.String) | |
246 | 265 | */ |
247 | 266 | public void load(String preferencesKey) throws BackingStoreException { |
248 | 267 | Preferences prefs = Preferences.userRoot().node(preferencesKey); |
@@ -289,10 +308,10 @@ public final class AppSettings extends HashMap<String, Object> { | ||
289 | 308 | * On the Windows operating system, the preferences are saved in the registry |
290 | 309 | * at the following key:<br> |
291 | 310 | * <code>HKEY_CURRENT_USER\Software\JavaSoft\Prefs\[preferencesKey]</code> |
292 | - * | |
293 | - * @param preferencesKey The preferences key to save at. Generally the | |
294 | - * application's unique name. | |
295 | - * | |
311 | + * | |
312 | + * @param preferencesKey The preferences key to save at. Generally the | |
313 | + * application's unique name. | |
314 | + * | |
296 | 315 | * @throws BackingStoreException If an exception occurs with the preferences |
297 | 316 | */ |
298 | 317 | public void save(String preferencesKey) throws BackingStoreException { |
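The preferences-based pair can be sketched the same way; the key below is a made-up example, typically the application's unique name as the javadoc suggests.

    import com.jme3.system.AppSettings;

    public class SettingsPreferences {
        public static void main(String[] args) throws Exception {
            AppSettings settings = new AppSettings(true);
            settings.setTitle("My Game");
            settings.save("com.example.mygame");   // stored via java.util.prefs.Preferences

            AppSettings restored = new AppSettings(false);
            restored.load("com.example.mygame");   // reads back the same preferences node
        }
    }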
@@ -302,7 +321,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
302 | 321 | // purge any other parameters set in older versions of the app, so |
303 | 322 | // that they don't leak onto the AppSettings of newer versions. |
304 | 323 | prefs.clear(); |
305 | - | |
324 | + | |
306 | 325 | for (String key : keySet()) { |
307 | 326 | Object val = get(key); |
308 | 327 | if (val instanceof Integer) { |
@@ -314,12 +333,12 @@ public final class AppSettings extends HashMap<String, Object> { | ||
314 | 333 | } else if (val instanceof Boolean) { |
315 | 334 | prefs.putBoolean("B_" + key, (Boolean) val); |
316 | 335 | } |
317 | - // NOTE: Ignore any parameters of unsupported types instead | |
318 | - // of throwing exception. This is specifically for handling | |
336 | + // NOTE: Ignore any parameters of unsupported types instead | |
337 | + // of throwing exception. This is specifically for handling | |
319 | 338 | // BufferedImage which is used in setIcons(), as you do not |
320 | 339 | // want to export such data in the preferences. |
321 | 340 | } |
322 | - | |
341 | + | |
323 | 342 | // Ensure the data is properly written into preferences before |
324 | 343 | // continuing. |
325 | 344 | prefs.sync(); |
@@ -366,7 +385,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
366 | 385 | |
367 | 386 | return s; |
368 | 387 | } |
369 | - | |
388 | + | |
370 | 389 | /** |
371 | 390 | * Get a float from the settings. |
372 | 391 | * <p> |
@@ -401,51 +420,51 @@ public final class AppSettings extends HashMap<String, Object> { | ||
401 | 420 | public void putString(String key, String value) { |
402 | 421 | put(key, value); |
403 | 422 | } |
404 | - | |
423 | + | |
405 | 424 | /** |
406 | 425 | * Set a float on the settings. |
407 | 426 | */ |
408 | 427 | public void putFloat(String key, float value) { |
409 | 428 | put(key, Float.valueOf(value)); |
410 | 429 | } |
411 | - | |
430 | + | |
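Because AppSettings is a String-keyed map, custom values can also be stored with the generic put/get methods shown above; the key names below are invented for illustration.

    import com.jme3.system.AppSettings;

    public class CustomKeys {
        public static void main(String[] args) {
            AppSettings settings = new AppSettings(true);
            settings.putFloat("MouseSensitivity", 1.5f);   // hypothetical custom key
            settings.putString("PlayerName", "guest");     // hypothetical custom key

            float sensitivity = settings.getFloat("MouseSensitivity");
            String name = settings.getString("PlayerName");
            System.out.println(name + " -> " + sensitivity);
        }
    }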
412 | 431 | /** |
413 | 432 | * Enable or disable mouse emulation on touchscreen based devices. |
414 | 433 | * This will convert taps on the touchscreen or movement of finger |
415 | 434 | * over touchscreen (only the first) into the appropriate mouse events. |
416 | - * | |
435 | + * | |
417 | 436 | * @param emulateMouse If mouse emulation should be enabled. |
418 | 437 | */ |
419 | 438 | public void setEmulateMouse(boolean emulateMouse) { |
420 | 439 | putBoolean("TouchEmulateMouse", emulateMouse); |
421 | 440 | } |
422 | - | |
441 | + | |
423 | 442 | /** |
424 | 443 | * Returns true if mouse emulation is enabled, false otherwise. |
425 | - * | |
444 | + * | |
426 | 445 | * @return Mouse emulation mode. |
427 | 446 | */ |
428 | 447 | public boolean isEmulateMouse() { |
429 | 448 | return getBoolean("TouchEmulateMouse"); |
430 | 449 | } |
431 | - | |
450 | + | |
432 | 451 | /** |
433 | 452 | * Specify if the X or Y (or both) axes should be flipped for emulated mouse. |
434 | - * | |
453 | + * | |
435 | 454 | * @param flipX Set to flip X axis |
436 | 455 | * @param flipY Set to flip Y axis |
437 | - * | |
438 | - * @see #setEmulateMouse(boolean) | |
456 | + * | |
457 | + * @see #setEmulateMouse(boolean) | |
439 | 458 | */ |
440 | 459 | public void setEmulateMouseFlipAxis(boolean flipX, boolean flipY) { |
441 | 460 | putBoolean("TouchEmulateMouseFlipX", flipX); |
442 | 461 | putBoolean("TouchEmulateMouseFlipY", flipY); |
443 | 462 | } |
444 | - | |
463 | + | |
445 | 464 | public boolean isEmulateMouseFlipX() { |
446 | 465 | return getBoolean("TouchEmulateMouseFlipX"); |
447 | 466 | } |
448 | - | |
467 | + | |
449 | 468 | public boolean isEmulateMouseFlipY() { |
450 | 469 | return getBoolean("TouchEmulateMouseFlipY"); |
451 | 470 | } |
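A short sketch of the touch-to-mouse options documented above; the chosen values are arbitrary.

    import com.jme3.system.AppSettings;

    public class TouchMouseConfig {
        public static void main(String[] args) {
            AppSettings settings = new AppSettings(true);
            settings.setEmulateMouse(true);                 // taps and first-finger drags become mouse events
            settings.setEmulateMouseFlipAxis(false, true);  // flip only the Y axis
            System.out.println("flipY=" + settings.isEmulateMouseFlipY());
        }
    }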
@@ -484,7 +503,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
484 | 503 | * <li>AppSettings.LWJGL_OPENGL1 - Force OpenGL1.1 compatibility</li> |
485 | 504 | * <li>AppSettings.LWJGL_OPENGL2 - Force OpenGL2 compatibility</li> |
486 | 505 | * <li>AppSettings.LWJGL_OPENGL3 - Force OpenGL3.3 compatibility</li> |
487 | - * <li>AppSettings.LWJGL_OPENGL_ANY - Choose an appropriate | |
506 | + * <li>AppSettings.LWJGL_OPENGL_ANY - Choose an appropriate | |
488 | 507 | * OpenGL version based on system capabilities</li> |
489 | 508 | * <li>null - Disable graphics rendering</li> |
490 | 509 | * </ul> |
@@ -496,7 +515,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
496 | 515 | } |
497 | 516 | |
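For the renderer selection above, a minimal desktop-side sketch; pinning OpenGL2 is shown only as an example of one of the listed constants.

    import com.jme3.system.AppSettings;

    public class RendererSelection {
        public static void main(String[] args) {
            AppSettings settings = new AppSettings(true);
            // Let LWJGL pick a suitable OpenGL version for the current machine
            settings.setRenderer(AppSettings.LWJGL_OPENGL_ANY);
            // ...or pin a specific level:
            // settings.setRenderer(AppSettings.LWJGL_OPENGL2);
            // Passing null disables graphics rendering entirely.
        }
    }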
498 | 517 | /** |
499 | - * Set a custom graphics renderer to use. The class should implement | |
518 | + * Set a custom graphics renderer to use. The class should implement | |
500 | 519 | * the {@link JmeContext} interface. |
501 | 520 | * @param clazz The custom context class. |
502 | 521 | * (Default: not set) |
@@ -511,7 +530,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
511 | 530 | * <li>AppSettings.LWJGL_OPENAL - Default for LWJGL</li> |
512 | 531 | * <li>null - Disable audio</li> |
513 | 532 | * </ul> |
514 | - * @param audioRenderer | |
533 | + * @param audioRenderer | |
515 | 534 | * (Default: LWJGL) |
516 | 535 | */ |
517 | 536 | public void setAudioRenderer(String audioRenderer) { |
@@ -573,10 +592,10 @@ public final class AppSettings extends HashMap<String, Object> { | ||
573 | 592 | setMinHeight(height); |
574 | 593 | } |
575 | 594 | |
576 | - | |
577 | - | |
595 | + | |
596 | + | |
578 | 597 | /** |
579 | - * Set the frequency, also known as refresh rate, for the | |
598 | + * Set the frequency, also known as refresh rate, for the | |
580 | 599 | * rendering display. |
581 | 600 | * @param value The frequency |
582 | 601 | * (Default: 60) |
@@ -593,13 +612,13 @@ public final class AppSettings extends HashMap<String, Object> { | ||
593 | 612 | * 16 bits. On some platforms 24 bits might not be supported, in that case, |
594 | 613 | * specify 16 bits.<p> |
595 | 614 | * (Default: 24) |
596 | - * | |
615 | + * | |
597 | 616 | * @param value The depth bits |
598 | 617 | */ |
599 | 618 | public void setDepthBits(int value){ |
600 | 619 | putInteger("DepthBits", value); |
601 | 620 | } |
602 | - | |
621 | + | |
603 | 622 | /** |
604 | 623 | * Set the number of stencil bits. |
605 | 624 | * <p> |
@@ -608,17 +627,17 @@ public final class AppSettings extends HashMap<String, Object> { | ||
608 | 627 | * the stencil buffer. |
609 | 628 | * </p> |
610 | 629 | * (Default: 0) |
611 | - * | |
630 | + * | |
612 | 631 | * @param value Number of stencil bits |
613 | 632 | */ |
614 | 633 | public void setStencilBits(int value){ |
615 | 634 | putInteger("StencilBits", value); |
616 | 635 | } |
617 | - | |
636 | + | |
618 | 637 | /** |
619 | 638 | * Set the bits per pixel for the display. Appropriate |
620 | 639 | * values are 16 for RGB565 color format, or 24 for RGB8 color format. |
621 | - * | |
640 | + * | |
622 | 641 | * @param value The bits per pixel to set |
623 | 642 | * (Default: 24) |
624 | 643 | */ |
@@ -630,7 +649,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
630 | 649 | * Set the number of samples per pixel. A value of 1 indicates |
631 | 650 | * each pixel should be single-sampled, higher values indicate |
632 | 651 | * a pixel should be multi-sampled. |
633 | - * | |
652 | + * | |
634 | 653 | * @param value The number of samples |
635 | 654 | * (Default: 1) |
636 | 655 | */ |
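Pulling the display-buffer setters above together in one sketch; the values are typical choices, not prescriptions.

    import com.jme3.system.AppSettings;

    public class DisplayBufferConfig {
        public static void main(String[] args) {
            AppSettings settings = new AppSettings(true);
            settings.setFrequency(60);      // refresh rate
            settings.setBitsPerPixel(24);   // RGB8; use 16 for RGB565
            settings.setDepthBits(24);      // specify 16 where 24 is unsupported
            settings.setStencilBits(8);     // only if stencil effects are needed
            settings.setSamples(4);         // multisampling; 1 = single-sampled
        }
    }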
@@ -657,16 +676,16 @@ public final class AppSettings extends HashMap<String, Object> { | ||
657 | 676 | /** |
658 | 677 | * Set to true to enable vertical-synchronization, limiting and synchronizing |
659 | 678 | * every frame rendered to the monitor's refresh rate. |
660 | - * @param value | |
679 | + * @param value | |
661 | 680 | * (Default: false) |
662 | 681 | */ |
663 | 682 | public void setVSync(boolean value) { |
664 | 683 | putBoolean("VSync", value); |
665 | 684 | } |
666 | - | |
685 | + | |
667 | 686 | /** |
668 | 687 | * Enable 3D stereo. |
669 | - * <p>This feature requires hardware support from the GPU driver. | |
688 | + * <p>This feature requires hardware support from the GPU driver. | |
670 | 689 | * @see <a href="http://en.wikipedia.org/wiki/Quad_buffering">http://en.wikipedia.org/wiki/Quad_buffering</a><br /> |
671 | 690 | * Once enabled, filters or scene processors that handle 3D stereo rendering |
672 | 691 | * could use this feature to render using hardware 3D stereo.</p> |
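The two toggles above combine as in the sketch below; as noted, stereo is only effective with quad-buffer capable GPU drivers.

    import com.jme3.system.AppSettings;

    public class OutputOptions {
        public static void main(String[] args) {
            AppSettings settings = new AppSettings(true);
            settings.setVSync(true);      // limit and sync frames to the monitor's refresh rate
            settings.setStereo3D(true);   // requires hardware 3D stereo support
        }
    }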
@@ -693,16 +712,16 @@ public final class AppSettings extends HashMap<String, Object> { | ||
693 | 712 | public void setIcons(Object[] value) { |
694 | 713 | put("Icons", value); |
695 | 714 | } |
696 | - | |
715 | + | |
697 | 716 | /** |
698 | 717 | * Sets the path of the settings dialog image to use. |
699 | 718 | * <p> |
700 | - * The image will be displayed in the settings dialog when the | |
719 | + * The image will be displayed in the settings dialog when the | |
701 | 720 | * application is started. |
702 | 721 | * </p> |
703 | 722 | * (Default: /com/jme3/app/Monkey.png) |
704 | - * | |
705 | - * @param path The path to the image in the classpath. | |
723 | + * | |
724 | + * @param path The path to the image in the classpath. | |
706 | 725 | */ |
707 | 726 | public void setSettingsDialogImage(String path) { |
708 | 727 | putString("SettingsDialogImage", path); |
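A sketch of the window-branding setters; the resource paths are invented, and (per the save() note earlier) BufferedImage icons are not exported to preferences.

    import com.jme3.system.AppSettings;
    import java.awt.image.BufferedImage;
    import javax.imageio.ImageIO;

    public class BrandingConfig {
        public static void main(String[] args) throws Exception {
            AppSettings settings = new AppSettings(true);
            BufferedImage icon = ImageIO.read(
                    BrandingConfig.class.getResourceAsStream("/Interface/icon32.png")); // hypothetical resource
            settings.setIcons(new BufferedImage[]{icon});
            settings.setSettingsDialogImage("/Interface/splash.png");                   // classpath location
        }
    }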
@@ -710,7 +729,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
710 | 729 | |
711 | 730 | /** |
712 | 731 | * Get the framerate. |
713 | - * @see #setFrameRate(int) | |
732 | + * @see #setFrameRate(int) | |
714 | 733 | */ |
715 | 734 | public int getFrameRate() { |
716 | 735 | return getInteger("FrameRate"); |
@@ -718,7 +737,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
718 | 737 | |
719 | 738 | /** |
720 | 739 | * Get the use input state. |
721 | - * @see #setUseInput(boolean) | |
740 | + * @see #setUseInput(boolean) | |
722 | 741 | */ |
723 | 742 | public boolean useInput() { |
724 | 743 | return getBoolean("UseInput"); |
@@ -726,7 +745,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
726 | 745 | |
727 | 746 | /** |
728 | 747 | * Get the renderer |
729 | - * @see #setRenderer(java.lang.String) | |
748 | + * @see #setRenderer(java.lang.String) | |
730 | 749 | */ |
731 | 750 | public String getRenderer() { |
732 | 751 | return getString("Renderer"); |
@@ -734,7 +753,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
734 | 753 | |
735 | 754 | /** |
736 | 755 | * Get the width |
737 | - * @see #setWidth(int) | |
756 | + * @see #setWidth(int) | |
738 | 757 | */ |
739 | 758 | public int getWidth() { |
740 | 759 | return getInteger("Width"); |
@@ -742,7 +761,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
742 | 761 | |
743 | 762 | /** |
744 | 763 | * Get the height |
745 | - * @see #setHeight(int) | |
764 | + * @see #setHeight(int) | |
746 | 765 | */ |
747 | 766 | public int getHeight() { |
748 | 767 | return getInteger("Height"); |
@@ -750,7 +769,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
750 | 769 | |
751 | 770 | /** |
752 | 771 | * Get the width |
753 | - * @see #setWidth(int) | |
772 | + * @see #setWidth(int) | |
754 | 773 | */ |
755 | 774 | public int getMinWidth() { |
756 | 775 | return getInteger("MinWidth"); |
@@ -758,7 +777,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
758 | 777 | |
759 | 778 | /** |
760 | 779 | * Get the height |
761 | - * @see #setHeight(int) | |
780 | + * @see #setHeight(int) | |
762 | 781 | */ |
763 | 782 | public int getMinHeight() { |
764 | 783 | return getInteger("MinHeight"); |
@@ -766,7 +785,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
766 | 785 | |
767 | 786 | /** |
768 | 787 | * Get the bits per pixel |
769 | - * @see #setBitsPerPixel(int) | |
788 | + * @see #setBitsPerPixel(int) | |
770 | 789 | */ |
771 | 790 | public int getBitsPerPixel() { |
772 | 791 | return getInteger("BitsPerPixel"); |
@@ -774,7 +793,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
774 | 793 | |
775 | 794 | /** |
776 | 795 | * Get the frequency |
777 | - * @see #setFrequency(int) | |
796 | + * @see #setFrequency(int) | |
778 | 797 | */ |
779 | 798 | public int getFrequency() { |
780 | 799 | return getInteger("Frequency"); |
@@ -790,7 +809,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
790 | 809 | |
791 | 810 | /** |
792 | 811 | * Get the number of stencil bits |
793 | - * @see #setStencilBits(int) | |
812 | + * @see #setStencilBits(int) | |
794 | 813 | */ |
795 | 814 | public int getStencilBits() { |
796 | 815 | return getInteger("StencilBits"); |
@@ -798,7 +817,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
798 | 817 | |
799 | 818 | /** |
800 | 819 | * Get the number of samples |
801 | - * @see #setSamples(int) | |
820 | + * @see #setSamples(int) | |
802 | 821 | */ |
803 | 822 | public int getSamples() { |
804 | 823 | return getInteger("Samples"); |
@@ -806,7 +825,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
806 | 825 | |
807 | 826 | /** |
808 | 827 | * Get the application title |
809 | - * @see #setTitle(java.lang.String) | |
828 | + * @see #setTitle(java.lang.String) | |
810 | 829 | */ |
811 | 830 | public String getTitle() { |
812 | 831 | return getString("Title"); |
@@ -814,7 +833,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
814 | 833 | |
815 | 834 | /** |
816 | 835 | * Get the vsync state |
817 | - * @see #setVSync(boolean) | |
836 | + * @see #setVSync(boolean) | |
818 | 837 | */ |
819 | 838 | public boolean isVSync() { |
820 | 839 | return getBoolean("VSync"); |
@@ -822,7 +841,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
822 | 841 | |
823 | 842 | /** |
824 | 843 | * Get the fullscreen state |
825 | - * @see #setFullscreen(boolean) | |
844 | + * @see #setFullscreen(boolean) | |
826 | 845 | */ |
827 | 846 | public boolean isFullscreen() { |
828 | 847 | return getBoolean("Fullscreen"); |
@@ -830,7 +849,7 @@ public final class AppSettings extends HashMap<String, Object> { | ||
830 | 849 | |
831 | 850 | /** |
832 | 851 | * Get the use joysticks state |
833 | - * @see #setUseJoysticks(boolean) | |
852 | + * @see #setUseJoysticks(boolean) | |
834 | 853 | */ |
835 | 854 | public boolean useJoysticks() { |
836 | 855 | return !getBoolean("DisableJoysticks"); |
@@ -838,31 +857,31 @@ public final class AppSettings extends HashMap<String, Object> { | ||
838 | 857 | |
839 | 858 | /** |
840 | 859 | * Get the audio renderer |
841 | - * @see #setAudioRenderer(java.lang.String) | |
860 | + * @see #setAudioRenderer(java.lang.String) | |
842 | 861 | */ |
843 | 862 | public String getAudioRenderer() { |
844 | 863 | return getString("AudioRenderer"); |
845 | 864 | } |
846 | - | |
865 | + | |
847 | 866 | /** |
848 | 867 | * Get the stereo 3D state |
849 | - * @see #setStereo3D(boolean) | |
868 | + * @see #setStereo3D(boolean) | |
850 | 869 | */ |
851 | 870 | public boolean useStereo3D(){ |
852 | - return getBoolean("Stereo3D"); | |
871 | + return getBoolean("Stereo3D"); | |
853 | 872 | } |
854 | 873 | |
855 | 874 | /** |
856 | 875 | * Get the icon array |
857 | - * @see #setIcons(java.lang.Object[]) | |
876 | + * @see #setIcons(java.lang.Object[]) | |
858 | 877 | */ |
859 | 878 | public Object[] getIcons() { |
860 | 879 | return (Object[]) get("Icons"); |
861 | 880 | } |
862 | - | |
881 | + | |
863 | 882 | /** |
864 | 883 | * Get the settings dialog image |
865 | - * @see #setSettingsDialogImage(java.lang.String) | |
884 | + * @see #setSettingsDialogImage(java.lang.String) | |
866 | 885 | */ |
867 | 886 | public String getSettingsDialogImage() { |
868 | 887 | return getString("SettingsDialogImage"); |
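As a closing sketch, the getters above are typically queried from application code once the context is running. The class name MyGame and the use of the protected settings field inherited by SimpleApplication are assumptions for illustration.

    import com.jme3.app.SimpleApplication;

    public class MyGame extends SimpleApplication {
        @Override
        public void simpleInitApp() {
            // 'settings' is the AppSettings instance held by the application
            int width = settings.getWidth();
            int height = settings.getHeight();
            boolean vsync = settings.isVSync();
            String audio = settings.getAudioRenderer();
            System.out.println(width + "x" + height + ", vsync=" + vsync + ", audio=" + audio);
        }
    }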