
Example 96 with AudioFormat

Use of javax.sound.sampled.AudioFormat in project algorithms-learning by brianway.

The class StdAudio, method init().

// open up an audio stream
private static void init() {
    try {
        // 44,100 samples per second, 16-bit audio, mono, signed PCM, little endian
        AudioFormat format = new AudioFormat((float) SAMPLE_RATE, BITS_PER_SAMPLE, 1, true, false);
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format, SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE);
        // the internal buffer is a fraction of the actual buffer size, this choice is arbitrary
        // it gets divided because we can't expect the buffered data to line up exactly with when
        // the sound card decides to push out its samples.
        buffer = new byte[SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE / 3];
    } catch (LineUnavailableException e) {
        System.out.println(e.getMessage());
        System.exit(1);
    }
    // no sound gets made before this call
    line.start();
}
Also used: DataLine (javax.sound.sampled.DataLine), SourceDataLine (javax.sound.sampled.SourceDataLine), LineUnavailableException (javax.sound.sampled.LineUnavailableException), AudioFormat (javax.sound.sampled.AudioFormat)
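
For comparison, here is a minimal, self-contained sketch of the same setup outside the StdAudio class: it opens a mono, 16-bit, signed, little-endian PCM line exactly as init() does, then plays one second of a 440 Hz tone. The class name, local variables, and tone parameters are illustrative choices, not taken from the project above.

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.SourceDataLine;

public class ToneSketch {
    public static void main(String[] args) throws Exception {
        int sampleRate = 44100;
        // 44,100 samples per second, 16-bit, mono, signed PCM, little endian
        AudioFormat format = new AudioFormat(sampleRate, 16, 1, true, false);
        SourceDataLine line = AudioSystem.getSourceDataLine(format);
        line.open(format);
        line.start();
        // one second of samples, two bytes each
        byte[] buffer = new byte[2 * sampleRate];
        for (int i = 0; i < sampleRate; i++) {
            short s = (short) (0.5 * Short.MAX_VALUE
                    * Math.sin(2 * Math.PI * 440 * i / sampleRate));
            buffer[2 * i] = (byte) s;            // low byte first (little endian)
            buffer[2 * i + 1] = (byte) (s >> 8); // then the high byte
        }
        line.write(buffer, 0, buffer.length);
        line.drain();                            // block until playback finishes
        line.close();
    }
}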

Example 97 with AudioFormat

Use of javax.sound.sampled.AudioFormat in project algorithms-learning by brianway.

The class StdAudio, method save().

/**
 * Saves the double array as an audio file (using .wav or .au format).
 *
 * @param filename the name of the audio file
 * @param samples the array of samples
 */
public static void save(String filename, double[] samples) {
    // assumes 44,100 samples per second
    // use 16-bit audio, mono, signed PCM, little endian
    AudioFormat format = new AudioFormat(SAMPLE_RATE, 16, 1, true, false);
    byte[] data = new byte[2 * samples.length];
    for (int i = 0; i < samples.length; i++) {
        int temp = (short) (samples[i] * MAX_16_BIT);
        data[2 * i + 0] = (byte) temp;
        data[2 * i + 1] = (byte) (temp >> 8);
    }
    // now save the file
    try {
        ByteArrayInputStream bais = new ByteArrayInputStream(data);
        AudioInputStream ais = new AudioInputStream(bais, format, samples.length);
        if (filename.endsWith(".wav") || filename.endsWith(".WAV")) {
            AudioSystem.write(ais, AudioFileFormat.Type.WAVE, new File(filename));
        } else if (filename.endsWith(".au") || filename.endsWith(".AU")) {
            AudioSystem.write(ais, AudioFileFormat.Type.AU, new File(filename));
        } else {
            throw new RuntimeException("File format not supported: " + filename);
        }
    } catch (IOException e) {
        System.out.println(e);
        System.exit(1);
    }
}
Also used: AudioInputStream (javax.sound.sampled.AudioInputStream), ByteArrayInputStream (java.io.ByteArrayInputStream), IOException (java.io.IOException), AudioFormat (javax.sound.sampled.AudioFormat), File (java.io.File)
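
As a complementary sketch (not part of the project), the inverse read path shows why save() writes the low byte first: a 16-bit little-endian mono WAV can be decoded back into doubles in [-1, 1] by reassembling each byte pair into a signed short. The file name is an example, and readAllBytes() requires Java 9 or newer.

import java.io.File;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class WavReadSketch {
    public static void main(String[] args) throws Exception {
        AudioInputStream ais = AudioSystem.getAudioInputStream(new File("tone.wav"));
        AudioFormat format = ais.getFormat();
        byte[] bytes = ais.readAllBytes();       // Java 9+; use a read loop on older JDKs
        double[] samples = new double[bytes.length / 2];
        for (int i = 0; i < samples.length; i++) {
            // little endian: unsigned low byte, then the sign-carrying high byte
            short s = (short) ((bytes[2 * i] & 0xff) | (bytes[2 * i + 1] << 8));
            samples[i] = s / 32768.0;
        }
        System.out.println(samples.length + " samples at " + format.getSampleRate() + " Hz");
    }
}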

Example 98 with AudioFormat

Use of javax.sound.sampled.AudioFormat in project narchy by automenta.

The class P, method run().

@Override
public void run() {
    try {
        {
            // =====================================================================================================
            // Setup audio
            // =====================================================================================================
            final AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, SAMPLE_RATE, 16, 1, 2, SAMPLE_RATE, false);
            final SourceDataLine line = AudioSystem.getSourceDataLine(format);
            line.open(format, BUFFER_SIZE);
            line.start();
            final byte[] out = new byte[BUFFER_SIZE];
            final int[][] delay = new int[NUM_TRACKS][DELAY_SIZE];
            int index;
            int sequence = 0;
            // =====================================================================================================
            // Song data
            // =====================================================================================================
            final int[][] song = new int[16][32];
            for (index = 0; index < song_data.length(); index++) {
                final int j = (2 * index) & 0x1f;
                song[index >> 4][j + 0] = (byte) (song_data.charAt(index) >> 8);
                song[index >> 4][j + 1] = (byte) (song_data.charAt(index) & 0xff);
            }
            // Generate wave forms
            final int[][] wave = new int[3][WAVE_BUFFER];
            for (index = 0; index < WAVE_BUFFER; index++) {
                // wave[0][index] = FP_S1 - ((index & (WAVE_BUFFER / 2)) << 1);
                wave[0][index] = (int) (FP_S1 * (float) Math.sin(index * (2f * PI / WAVE_BUFFER)));
                wave[1][index] = index < (WAVE_BUFFER >> 1) ? FP_S1 : -FP_S1;
                wave[2][index] = (int) (FP_U1 * (float) Math.random()) - FP_S1;
            // wave[1][index] = index - FP_S1;
            }
            new Thread(new Runnable() {

                int sequence;

                @Override
                public void run() {
                    while (true) {
                        for (int track = 0; track < NUM_TRACKS; track++) {
                            final int[] ins = song[INSTRUMENT_OFFSET + track];
                            ins[ENV_LEVEL + 1] = FP_U1;
                            // Setup envelope
                            final int pattern = song[SEQUENCE_OFFSET + (track >> 1)][((track & 1) * SEQ_LENGTH) + (sequence >> 5)];
                            final int note = song[PATTERN_OFFSET + pattern][sequence & 0x1f];
                            if (note == 1 && ins[ENV_STAGE] < (2 << FP)) {
                                ins[ENV_STAGE] = (2 << FP);
                            }
                            if (note > 1) {
                                ins[OSC1_RATE] = frequencies.charAt((note - 2 + ins[PITCH]) & 0xf) << ((note - 2 + ins[PITCH]) >> 4);
                                ins[ENV_STAGE] = 0;
                            }
                            for (int index = 0, offset = 0; index < SAMPLES_PER_TICK; index++, offset += 2) {
                                if (track == 0) {
                                    // Clear output buffer
                                    out[offset] = 0;
                                    out[offset + 1] = 0;
                                }
                                // Generate envelope
                                final int stage = ins[ENV_STAGE] >> FP;
                                ins[ENV_STAGE] += ins[ENV_RATE + stage];
                                // Generate oscillators
                                ins[OSC1_PHASE] += ins[OSC1_RATE];
                                int value = wave[ins[OSC_TYPE]][ins[OSC1_PHASE] & WAVE_BUFFER_MASK];
                                final int env = ins[ENV_LEVEL + stage] + (int) ((long) (ins[ENV_LEVEL + stage + 1] - ins[ENV_LEVEL + stage]) * (ins[ENV_STAGE] & FP_U1) >> FP);
                                value = (((value * env) >> (FP + 2)) * ins[VOLUME]) >> 6;
                                // add delay
                                value += ((delay[track][(ins[DELAY_POS] - ((ins[DELAY] * SAMPLES_PER_TICK >> 1))) & DELAY_MASK]) * ins[DELAY_MIX]) / 128;
                                delay[track][ins[DELAY_POS]] = (value * ins[DELAY_FEEDBACK]) / 128;
                                out[offset + 0] += (byte) value;
                                out[offset + 1] += (byte) (value >> 8);
                                ins[DELAY_POS] = (ins[DELAY_POS] + 1) & DELAY_MASK;
                            }
                        }
                        int index = 0;
                        while (index < BUFFER_SIZE) {
                            index += line.write(out, index, BUFFER_SIZE - index);
                        }
                        sequence = (sequence + 1) & SEQ_WRAP;
                    }
                }
            }).start();
        }
        // Set up the graphics stuff, double-buffering.
        final BufferedImage screen = new BufferedImage(WIDTH, HEIGHT, BufferedImage.TYPE_INT_RGB);
        final Graphics g = screen.getGraphics();
        final Graphics appletGraphics = getGraphics();
        final Font smallFont = g.getFont();
        final Font bigFont = smallFont.deriveFont(0, 50);
        // player state variables
        final float[] player = new float[9];
        final float[] intersect = new float[32];
        final int[] faceX = new int[32];
        final int[] faceY = new int[32];
        float[][] solids = null;
        float[] level = null;
        // float[] plates = null;
        // number of portals shot this level
        int portals = 0;
        int levelCount = 0;
        int gameState = 0;
        int animation = 0;
        // Colors
        final Color[] color = new Color[str_colors.length() >> 1];
        for (int i = 0; i < color.length; i++) {
            color[i] = new Color((str_colors.charAt(2 * i + 0) << 16) + str_colors.charAt(2 * i + 1));
        }
        int i, k;
        int j = data.length();
        final int[] level_data = new int[j * 2];
        for (i = 0; i < j; i++) {
            // & 0xff;
            level_data[2 * i + 0] = (data.charAt(i) >> 8);
            level_data[2 * i + 1] = (data.charAt(i)) & 0xff;
        }
        final int num_levels = level_data[1] >> 1;
        // Some variables to use for the fps.
        float dt, lastTime = System.nanoTime() / NANOTIME;
        // Game loop.
        mainLoop: while (true) {
            final float time = System.nanoTime() / NANOTIME;
            dt = time - lastTime;
            dt = dt > 0.05f ? 0.05f : dt;
            dt = dt < 0.01f ? 0.01f : dt;
            lastTime = time;
            /**
             *********************
             * State initialization
             **********************
             */
            if ((int) player[Y] > (HEIGHT - BORDER - (DOOR_WIDTH / 2)) && (int) player[X] > (WIDTH - BORDER - DOOR_WIDTH)) {
                if (levelCount + 1 == num_levels) {
                    gameState = STATE_WAIT_FOR_INPUT + STATE_NO_UPDATE + STATE_GAME_COMPLETE;
                } else {
                    levelCount++;
                    level = null;
                }
            }
            if (player[Y] > HEIGHT) {
                gameState = STATE_WAIT_FOR_INPUT + STATE_NO_UPDATE + STATE_DIED;
                level = null;
            }
            // SECTION: State
            if (level == null) {
                player[X] = ENTRY_X + DOOR_WIDTH / 2;
                player[Y] = HEIGHT - BORDER - PLAYER_WALL_BOUNDING;
                player[AIMX] = player[X] + 10;
                player[AIMY] = player[Y];
                player[VX] = 0;
                player[VY] = 0;
                portals = 0;
                if (gameState == 0) {
                    gameState = STATE_WAIT_FOR_INPUT + STATE_NO_UPDATE + STATE_LEVEL_START;
                }
                // offset of the level in the data array
                int offset = (level_data[2 * levelCount + 0] << 8) + (level_data[2 * levelCount + 1]);
                // number of lines in all shapes
                final int length = level_data[offset++];
                final int shapes = level_data[offset++];
                int index = LEVEL_DATA;
                level = new float[length * COMPONENTS + index];
                solids = new float[shapes][];
                level[EXIT + X] = WIDTH - BORDER;
                level[EXIT + Y] = HEIGHT - BORDER;
                level[EXIT + X2] = WIDTH - BORDER - DOOR_WIDTH;
                level[EXIT + Y2] = HEIGHT - BORDER;
                k = 12;
                // skip lines in shape
                int point = offset + shapes;
                for (i = 0; i < shapes; i++, k = 0) {
                    final int linesInShape = level_data[offset + i];
                    int last = point + 2 * linesInShape - 2;
                    solids[i] = new float[2 * linesInShape + k];
                    for (j = 0; j < linesInShape; j++, last = point, point += 2, index += COMPONENTS) {
                        level[index + X] = 10 * (level_data[last + 0] & 0x7f);
                        level[index + Y] = 10 * (level_data[last + 1] & 0x7f);
                        level[index + X2] = 10 * (level_data[point + 0] & 0x7f);
                        level[index + Y2] = 10 * (level_data[point + 1] & 0x7f);
                        level[index + DEAD] = (level_data[point + 0] >> 7);
                        level[index + DIE] = (level_data[point + 1] >> 7);
                        solids[i][2 * j + X] = level[index + X];
                        solids[i][2 * j + Y] = level[index + Y];
                        final float dx = level[index + X2] - level[index + X];
                        final float dy = level[index + Y2] - level[index + Y];
                        final float len = (float) Math.sqrt(dx * dx + dy * dy);
                        level[index + LENGTH] = len;
                        level[index + DX] = dx / len;
                        level[index + DY] = dy / len;
                    }
                    if (i == 0) {
                        j = 2 * linesInShape;
                        solids[0][j + X] = solids[0][X];
                        solids[i][j + Y] = solids[i][Y];
                        // solidsX[i][n + 1] = 0;
                        // solidsY[i][n + 1] = 0;
                        j += 4;
                        solids[i][j + X] = WIDTH;
                        // solids[i][j + Y] = 0;
                        j += 2;
                        solids[i][j + X] = WIDTH;
                        solids[i][j + Y] = HEIGHT;
                        j += 2;
                        // solids[i][j + X] = 0;
                        solids[i][j + Y] = HEIGHT;
                    }
                }
            // final int rand = levelCount + 5;
            // SECTION: Create plates
            // plates = new float[NUM_PLATES * COMPONENTS];
            // for (i = 0; i < plates.length; i += COMPONENTS) {
            // for (j = 0; j < 4; j++) {
            // rand *= 16807;
            // intersect[j] = (rand * 4.6566129e-010f + 1) / 2;
            // }
            // plates[i + X] = (int) (intersect[1] * (WIDTH - 128));
            // plates[i + Y] = (int) (intersect[2] * (HEIGHT - 128));
            // plates[i + X2] = plates[i + X] + (int) (intersect[0] * 128) + 64;
            // plates[i + Y2] = plates[i + Y] + (int) (intersect[0] * 128) + 64;
            // 
            // final int a = (int) (intersect[3] * 25) + 128;
            // plates[i + DX] = (a << 16) | (a << 8) | a;
            // }
            }
            /**
             *******************
             * UPDATE
             ********************
             */
            // SECTION: Update
            int onGround = 0;
            // Player line intersection
            for (i = 0; i < level.length; i += COMPONENTS) {
                // TODO: remove and use if (0 < position instead
                // if (i == LEVEL_DATA) {
                // continue;
                // }
                final float x = level[i + X];
                final float y = level[i + Y];
                final float length = level[i + LENGTH];
                final float lx = level[i + DX];
                final float ly = level[i + DY];
                final float position = ((player[X] - x) * lx + (player[Y] - y) * ly);
                final float ax = x + position * lx;
                final float ay = y + position * ly;
                if (0 < position && position <= length) {
                    final float dx = ax - player[X];
                    final float dy = ay - player[Y];
                    if (dx * dx + dy * dy <= PLAYER_WALL_BOUNDING_2) {
                        if (i <= PORTAL2) {
                            if (player[BLOCKTIME] > time || portals < 3) {
                                // Too soon to pass through a portal again
                                continue;
                            }
                            // The player has collided into a portal
                            final int direction = i / COMPONENTS;
                            final int dstPortal = (1 - direction) * COMPONENTS;
                            player[BLOCKTIME] = time + PORTAL_BLOCK_TIME;
                            final float ddx = level[dstPortal + DX];
                            final float ddy = level[dstPortal + DY];
                            player[X] = level[dstPortal + X] + ddx * HALF_PORTAL_WIDTH - ddy * (PLAYER_WALL_BOUNDING + 2);
                            player[Y] = level[dstPortal + Y] + ddy * HALF_PORTAL_WIDTH + ddx * (PLAYER_WALL_BOUNDING + 2);
                            final float cosR = (float) Math.cos(2 * PI * direction - player[ROTATION]);
                            final float sinR = (float) Math.sin(2 * PI * direction - player[ROTATION]);
                            final float vx = player[VX] * cosR - player[VY] * sinR;
                            player[VY] = player[VX] * sinR + player[VY] * cosR;
                            player[VX] = vx;
                            break;
                        } else {
                            if ((int) level[i + DIE] != 0) {
                                level = null;
                                gameState = STATE_WAIT_FOR_INPUT + STATE_NO_UPDATE + STATE_DIED;
                                continue mainLoop;
                            }
                            // Line segment normal
                            final float nx = -ly;
                            final float ny = lx;
                            // The player has collided into some wall object
                            if (ny > 0) {
                                // The roof
                                player[VY] = 0;
                            } else if (ny < 0) {
                                // some upward surface
                                player[VY] = 0;
                                player[Y] = ay - PLAYER_WALL_BOUNDING;
                                onGround = 1;
                            // player[Y] += ny * Math.sqrt(PLAYER_WALL_BOUNDING_2 - dist);
                            } else {
                                // A wall
                                player[VX] = (player[VX] * nx < 0) ? 0 : player[VX];
                                player[X] = x + nx * PLAYER_WALL_BOUNDING;
                            }
                        }
                    }
                }
            }
            // SECTION: Input
            if ((gameState & STATE_WAIT_FOR_INPUT) != 0 && key[SPACE] != 0) {
                if ((gameState & STATE_GAME_COMPLETE) != 0) {
                    levelCount = -1;
                    gameState = STATE_WAIT_FOR_INPUT + STATE_NO_UPDATE + STATE_LEVEL_START;
                } else {
                    gameState = 0;
                }
                key[SPACE] = 0;
            }
            if (key[RIGHT] + key[LEFT] != 0 && player[ANIMATIONTIME] - time <= 0) {
                player[ANIMATIONTIME] = time + ANIMATION_TIME;
                animation = (animation + 1) & 1;
            }
            if (onGround == 1) {
                // on the ground
                player[VX] = (player[VX] + (key[RIGHT] - key[LEFT]) * GROUND_SPEED) * FRICTION;
                player[VY] = JUMP_SPEED * (key[UP] | key[SPACE]);
            } else {
                // in the air
                player[VY] += GRAVITY * dt;
                player[VX] += (key[RIGHT] - key[LEFT]) * AIR_SPEED;
                if (key[RIGHT] + key[LEFT] != 0) {
                    player[VX] *= FRICTION_AIR;
                }
            }
            if ((gameState & STATE_NO_UPDATE) == 0) {
                player[X] += player[VX] * dt;
                player[Y] += player[VY] * dt;
                // Aim
                player[AIMX] = key[MOUSE_X];
                player[AIMY] = key[MOUSE_Y];
                final float x3 = player[X];
                final float y3 = player[Y];
                final float x4 = player[X2];
                final float y4 = player[Y2];
                intersect[X] = 0;
                intersect[Y] = 0;
                float closest = 0x1000;
                for (i = LEVEL_DATA; i < level.length; i += COMPONENTS) {
                    final float x1 = level[i + X];
                    final float y1 = level[i + Y];
                    final float x2 = level[i + X2];
                    final float y2 = level[i + Y2];
                    final float denom = (y4 - y3) * (x2 - x1) - (x4 - x3) * (y2 - y1);
                    if (denom != 0) {
                        final float t0 = ((x4 - x3) * (y1 - y3) - (y4 - y3) * (x1 - x3)) / denom;
                        if (0 <= t0 && t0 <= 1) {
                            final float t1 = ((x2 - x1) * (y1 - y3) - (y2 - y1) * (x1 - x3)) / denom;
                            if (t1 >= 0 && t1 < closest) {
                                closest = t1;
                                intersect[X] = x1 + t0 * (x2 - x1);
                                intersect[Y] = y1 + t0 * (y2 - y1);
                                intersect[RAY_T] = t1;
                                intersect[LINE_T] = t0;
                                intersect[LINE_INDEX] = i;
                            }
                        }
                    }
                }
                if (key[MOUSE_BUTTON] != 0) {
                    // SECTION: Shoot portal
                    // intersect(player, level, intersect);
                    // private static void intersect(final float[] line, final float[] lines,
                    // final float[] dest)
                    final int line = (int) intersect[LINE_INDEX];
                    if ((int) level[line + DEAD] + (int) level[line + DIE] == 0) {
                        float x1, y1, x2, y2;
                        final float dx = level[line + DX] * HALF_PORTAL_WIDTH;
                        final float dy = level[line + DY] * HALF_PORTAL_WIDTH;
                        x1 = intersect[X] - dx;
                        y1 = intersect[Y] - dy;
                        x2 = intersect[X] + dx;
                        y2 = intersect[Y] + dy;
                        if (intersect[LINE_T] * level[line + LENGTH] < HALF_PORTAL_WIDTH) {
                            x1 = level[line + X];
                            y1 = level[line + Y];
                            x2 = x1 + dx * 2;
                            y2 = y1 + dy * 2;
                        }
                        if ((1 - intersect[LINE_T]) * level[line + LENGTH] < HALF_PORTAL_WIDTH) {
                            x2 = level[line + X2];
                            y2 = level[line + Y2];
                            x1 = x2 - dx * 2;
                            y1 = y2 - dy * 2;
                        }
                        final int index = COMPONENTS * (key[MOUSE_BUTTON] - 1);
                        final int other = PORTAL2 - index;
                        boolean apply = true;
                        portals |= 1 << (key[MOUSE_BUTTON] - 1);
                        if (portals == 3 && level[other + INDEX] == line) {
                            final float Dx = x1 - level[other + X];
                            final float Dy = y1 - level[other + Y];
                            apply = (Dx * Dx + Dy * Dy >= PORTAL_WIDTH_2);
                        }
                        if (apply) {
                            level[index + X] = x1;
                            level[index + Y] = y1;
                            level[index + X2] = x2;
                            level[index + Y2] = y2;
                            level[index + LENGTH] = 2 * HALF_PORTAL_WIDTH;
                            level[index + DX] = level[line + DX];
                            level[index + DY] = level[line + DY];
                            level[index + INDEX] = line;
                            // SECTION: Portal rotation calculation
                            player[ROTATION] = (float) Math.acos(level[PORTAL1 + DX] * level[PORTAL2 + DX] + level[PORTAL1 + DY] * level[PORTAL2 + DY]) + PI;
                            if (level[PORTAL1 + DX] * level[PORTAL2 + DY] + level[PORTAL1 + DY] * level[PORTAL2 + DX] > 0) {
                                player[ROTATION] *= -1;
                            }
                        }
                    }
                    key[MOUSE_BUTTON] = 0;
                }
            }
            /**
             *******************
             * RENDER
             ********************
             */
            g.setColor(color[COLOR_BACKGROUND]);
            g.fillRect(0, 0, WIDTH, HEIGHT);
            final float px = player[X] / 2 + WIDTH / 4;
            final float py = player[Y] / 2 + HEIGHT / 4 - 30;
            // SECTION: Render Walls
            for (i = LEVEL_DATA; i < level.length; i += COMPONENTS) {
                project(faceX, faceY, level, i, px, py);
                // g.setColor((level[i + DIE] != 0) ? color[COLOR_WALL_DIE] : color[COLOR_WALL +
                // (int) level[i + DEAD]]);
                g.setColor(color[COLOR_WALL + (int) (level[i + DEAD] + 2 * level[i + DIE])]);
                final float dot = level[i + DX] * (faceY[3] - faceY[0]) - level[i + DY] * (faceX[3] - faceX[0]);
                if (dot >= 0) {
                    g.fillPolygon(faceX, faceY, 4);
                    g.setColor(color[COLOR_WALL_BORDER]);
                    g.drawPolygon(faceX, faceY, 4);
                }
            }
            // calc aim
            // final float aimx = player[AIMX] - player[X];
            // final float aimy = player[AIMY] - player[Y];
            // final float aimLength = (float) Math.sqrt(aimx * aimx + aimy * aimy);
            // final int ax = (int) (player[X] + 10 * aimx / aimLength);
            // final int ay = (int) (player[Y] - 2 + 10 * aimy / aimLength);
            // 
            // if ((int) intersect[X] != 0) {
            // i = (int) intersect[LINE_INDEX];
            // g.setColor(color[COLOR_WALL_DIE - 2 * ((int) level[i + DIE] + (int) level[i + DEAD])]);
            // g.drawLine(ax, ay, (int) intersect[X], (int) intersect[Y]);
            // }
            // SECTION: Render Solids
            k = 6;
            for (i = 0; i < solids.length; i++, k = 0) {
                final int length = solids[i].length >> 1;
                for (j = 0; j < length; j++) {
                    faceX[j] = (int) ((px * zNear - px + solids[i][2 * j + X]) / zNear);
                    faceY[j] = (int) ((py * zNear - py + solids[i][2 * j + Y]) / zNear);
                }
                g.setColor(color[COLOR_FACE]);
                g.fillPolygon(faceX, faceY, length);
                g.setColor(color[COLOR_FACE_BORDER]);
                g.drawPolygon(faceX, faceY, length - k);
            }
            // SECTION: Render Portals & Doors
            j = 0;
            for (i = 0; i < 3; i++, j += COMPONENTS) {
                if ((int) level[j + X] != 0) {
                    project(faceX, faceY, level, j, px, py);
                    g.setColor(color[COLOR_P1 + i]);
                    g.fillPolygon(faceX, faceY, 4);
                    g.setColor(color[COLOR_FACE_BORDER]);
                    g.drawPolygon(faceX, faceY, 4);
                }
            }
            // SECTION: Render Player
            g.setColor(color[COLOR_WHITE]);
            g.drawOval((int) player[X] - 2, (int) player[Y] - PLAYER_H, 4, 4);
            g.drawLine((int) player[X], (int) player[Y] - PLAYER_H + 4, (int) player[X], (int) player[Y]);
            g.drawLine((int) player[X], (int) player[Y], (int) player[X] + animation * 4, (int) player[Y] + PLAYER_H);
            g.drawLine((int) player[X], (int) player[Y], (int) player[X] - animation * 4, (int) player[Y] + PLAYER_H);
            // calc & draw aim
            final float aimx = player[AIMX] - player[X];
            final float aimy = player[AIMY] - player[Y];
            final float aimLength = (float) Math.sqrt(aimx * aimx + aimy * aimy);
            // final int ax = (int) (player[X] + 10 * aimx / aimLength);
            // final int ay = (int) (player[Y] - 2 + 10 * aimy / aimLength);
            // g.drawLine((int) player[X], (int) player[Y] - 2, ax, ay);
            g.drawLine((int) player[X], (int) player[Y] - 2, (int) (player[X] + 10 * aimx / aimLength), (int) (player[Y] - 2 + 10 * aimy / aimLength));
            if (gameState != 0) {
                g.setFont(bigFont);
                if ((gameState & STATE_GAME_COMPLETE) != 0) {
                    g.drawString("TESTS COMPLETE", 160, 400);
                }
                if ((gameState & STATE_LEVEL_START) != 0) {
                    g.drawString("TEST " + String.valueOf(levelCount + 1), 320, 400);
                }
                if ((gameState & STATE_DIED) != 0) {
                    g.drawString("REGENERATING", 220, 400);
                }
                g.setFont(smallFont);
                g.drawString("Press <SPACE> to continue", 320, 420);
            }
            // Draw the entire results on the screen.
            appletGraphics.drawImage(screen, 0, 0, null);
            Thread.sleep(10);
        }
    } catch (final Exception exc) {
        exc.printStackTrace();
    }
}
Also used: BufferedImage (java.awt.image.BufferedImage), SourceDataLine (javax.sound.sampled.SourceDataLine), AudioFormat (javax.sound.sampled.AudioFormat)
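
The audio side of the example above reduces to a common streaming-synth pattern: open a SourceDataLine with the long-form PCM_SIGNED constructor, then keep it fed from a loop that generates a buffer of samples and writes it until it is fully queued (the inner while around line.write mirrors the example). Below is that pattern in isolation as a rough sketch; the buffer size, amplitude, and 220 Hz oscillator are illustrative stand-ins for the example's song data.

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.SourceDataLine;

public class SynthLoopSketch {
    static final int SAMPLE_RATE = 44100;
    static final int BUFFER_SIZE = 4096; // bytes per write, i.e. 2048 mono 16-bit samples

    public static void main(String[] args) throws Exception {
        // Long-form constructor; equivalent here to new AudioFormat(44100, 16, 1, true, false)
        AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                SAMPLE_RATE, 16, 1, 2, SAMPLE_RATE, false);
        SourceDataLine line = AudioSystem.getSourceDataLine(format);
        line.open(format, BUFFER_SIZE);
        line.start();

        byte[] out = new byte[BUFFER_SIZE];
        double phase = 0;
        while (true) {
            // generate one buffer of a 220 Hz sine tone
            for (int i = 0; i < BUFFER_SIZE; i += 2) {
                short s = (short) (8000 * Math.sin(phase));
                phase += 2 * Math.PI * 220 / SAMPLE_RATE;
                out[i] = (byte) s;           // little endian
                out[i + 1] = (byte) (s >> 8);
            }
            // write() may accept fewer bytes than requested, so loop until done
            int written = 0;
            while (written < BUFFER_SIZE) {
                written += line.write(out, written, BUFFER_SIZE - written);
            }
        }
    }
}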

Example 99 with AudioFormat

Use of javax.sound.sampled.AudioFormat in project javacv by bytedeco.

The class WebcamAndMicrophoneCapture, method main().

public static void main(String[] args) throws Exception, org.bytedeco.javacv.FrameGrabber.Exception {
    int captureWidth = 1280;
    int captureHeight = 720;
    // The available FrameGrabber classes include OpenCVFrameGrabber (opencv_videoio),
    // DC1394FrameGrabber, FlyCaptureFrameGrabber, OpenKinectFrameGrabber,
    // PS3EyeFrameGrabber, VideoInputFrameGrabber, and FFmpegFrameGrabber.
    OpenCVFrameGrabber grabber = new OpenCVFrameGrabber(WEBCAM_DEVICE_INDEX);
    grabber.setImageWidth(captureWidth);
    grabber.setImageHeight(captureHeight);
    grabber.start();
    // org.bytedeco.javacv.FFmpegFrameRecorder.FFmpegFrameRecorder(String
    // filename, int imageWidth, int imageHeight, int audioChannels)
    // For each param, we're passing in...
    // filename = either a path to a local file we wish to create, or an
    // RTMP url to an FMS / Wowza server
    // imageWidth = width we specified for the grabber
    // imageHeight = height we specified for the grabber
    // audioChannels = 2, because we like stereo
    FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("rtmp://my-streaming-server/app_name_here/instance_name/stream_name", captureWidth, captureHeight, 2);
    recorder.setInterleaved(true);
    // decrease "startup" latency in FFMPEG (see:
    // https://trac.ffmpeg.org/wiki/StreamingGuide)
    recorder.setVideoOption("tune", "zerolatency");
    // tradeoff between quality and encode speed
    // possible values are ultrafast, superfast, veryfast, faster, fast,
    // medium, slow, slower, veryslow
    // ultrafast offers us the least amount of compression (lower encoder
    // CPU) at the cost of a larger stream size
    // at the other end, veryslow provides the best compression (high
    // encoder CPU) while lowering the stream size
    // (see: https://trac.ffmpeg.org/wiki/Encode/H.264)
    recorder.setVideoOption("preset", "ultrafast");
    // Constant Rate Factor (see: https://trac.ffmpeg.org/wiki/Encode/H.264)
    recorder.setVideoOption("crf", "28");
    // 2000 kb/s, a reasonable "sane" range for 720p
    recorder.setVideoBitrate(2000000);
    recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
    recorder.setFormat("flv");
    // FPS (frames per second)
    recorder.setFrameRate(FRAME_RATE);
    // Key frame interval, in our case every 2 seconds -> 30 (fps) * 2 = 60
    // (gop length)
    recorder.setGopSize(GOP_LENGTH_IN_FRAMES);
    // We don't want variable bitrate audio
    recorder.setAudioOption("crf", "0");
    // Highest quality
    recorder.setAudioQuality(0);
    // 192 Kbps
    recorder.setAudioBitrate(192000);
    recorder.setSampleRate(44100);
    recorder.setAudioChannels(2);
    recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
    // Jack 'n coke... do it...
    recorder.start();
    // Thread for audio capture, this could be in a nested private class if you prefer...
    new Thread(new Runnable() {

        @Override
        public void run() {
            // Pick a format...
            // NOTE: It is better to enumerate the formats that the system supports,
            // because getLine() can error out with any particular format...
            // For us: 44.1 sample rate, 16 bits, stereo, signed, little endian
            AudioFormat audioFormat = new AudioFormat(44100.0F, 16, 2, true, false);
            // Get TargetDataLine with that format
            Mixer.Info[] minfoSet = AudioSystem.getMixerInfo();
            Mixer mixer = AudioSystem.getMixer(minfoSet[AUDIO_DEVICE_INDEX]);
            DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
            try {
                // Open and start capturing audio
                // It's possible to have more control over the chosen audio device with this line:
                // TargetDataLine line = (TargetDataLine)mixer.getLine(dataLineInfo);
                TargetDataLine line = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
                line.open(audioFormat);
                line.start();
                int sampleRate = (int) audioFormat.getSampleRate();
                int numChannels = audioFormat.getChannels();
                // Let's initialize our audio buffer...
                int audioBufferSize = sampleRate * numChannels;
                byte[] audioBytes = new byte[audioBufferSize];
                // Using a ScheduledThreadPoolExecutor vs a while loop with
                // a Thread.sleep will allow
                // us to get around some OS specific timing issues, and keep
                // to a more precise
                // clock as the fixed rate accounts for garbage collection
                // time, etc
                // a similar approach could be used for the webcam capture
                // as well, if you wish
                ScheduledThreadPoolExecutor exec = new ScheduledThreadPoolExecutor(1);
                exec.scheduleAtFixedRate(new Runnable() {

                    @Override
                    public void run() {
                        try {
                            // Read from the line... non-blocking
                            int nBytesRead = 0;
                            while (nBytesRead == 0) {
                                nBytesRead = line.read(audioBytes, 0, line.available());
                            }
                            // Since we specified 16 bits in the AudioFormat,
                            // we need to convert our read byte[] to short[]
                            // (see source from FFmpegFrameRecorder.recordSamples for AV_SAMPLE_FMT_S16)
                            // Let's initialize our short[] array
                            int nSamplesRead = nBytesRead / 2;
                            short[] samples = new short[nSamplesRead];
                            // Let's wrap our short[] into a ShortBuffer and
                            // pass it to recordSamples
                            ByteBuffer.wrap(audioBytes).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(samples);
                            ShortBuffer sBuff = ShortBuffer.wrap(samples, 0, nSamplesRead);
                            // recorder is instance of
                            // org.bytedeco.javacv.FFmpegFrameRecorder
                            recorder.recordSamples(sampleRate, numChannels, sBuff);
                        } catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
                            e.printStackTrace();
                        }
                    }
                }, 0, (long) 1000 / FRAME_RATE, TimeUnit.MILLISECONDS);
            } catch (LineUnavailableException e1) {
                e1.printStackTrace();
            }
        }
    }).start();
    // A really nice hardware accelerated component for our preview...
    CanvasFrame cFrame = new CanvasFrame("Capture Preview", CanvasFrame.getDefaultGamma() / grabber.getGamma());
    Frame capturedFrame = null;
    // While we are capturing...
    while ((capturedFrame = grabber.grab()) != null) {
        if (cFrame.isVisible()) {
            // Show our frame in the preview
            cFrame.showImage(capturedFrame);
        }
        // Initialize startTime just before its first use, since the delta
        // from assignment to computed time could otherwise be too high
        if (startTime == 0)
            startTime = System.currentTimeMillis();
        // Create timestamp for this frame
        videoTS = 1000 * (System.currentTimeMillis() - startTime);
        // Check for AV drift
        if (videoTS > recorder.getTimestamp()) {
            System.out.println("Lip-flap correction: " + videoTS + " : " + recorder.getTimestamp() + " -> " + (videoTS - recorder.getTimestamp()));
            // We tell the recorder to write this frame at this timestamp
            recorder.setTimestamp(videoTS);
        }
        // Send the frame to the org.bytedeco.javacv.FFmpegFrameRecorder
        recorder.record(capturedFrame);
    }
    cFrame.dispose();
    recorder.stop();
    grabber.stop();
}
Also used: Frame (org.bytedeco.javacv.Frame), CanvasFrame (org.bytedeco.javacv.CanvasFrame), ScheduledThreadPoolExecutor (java.util.concurrent.ScheduledThreadPoolExecutor), Mixer (javax.sound.sampled.Mixer), TargetDataLine (javax.sound.sampled.TargetDataLine), DataLine (javax.sound.sampled.DataLine), LineUnavailableException (javax.sound.sampled.LineUnavailableException), OpenCVFrameGrabber (org.bytedeco.javacv.OpenCVFrameGrabber), Exception (org.bytedeco.javacv.FrameRecorder.Exception), FFmpegFrameRecorder (org.bytedeco.javacv.FFmpegFrameRecorder), AudioFormat (javax.sound.sampled.AudioFormat), ShortBuffer (java.nio.ShortBuffer)
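
The comment in the capture thread recommends enumerating what the system actually supports before calling getLine(). A small sketch of that check, assuming the same 44.1 kHz, 16-bit, stereo, signed, little-endian target format (the class name is an illustrative choice):

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.Mixer;
import javax.sound.sampled.TargetDataLine;

public class ListCaptureMixers {
    public static void main(String[] args) {
        AudioFormat wanted = new AudioFormat(44100.0F, 16, 2, true, false);
        DataLine.Info info = new DataLine.Info(TargetDataLine.class, wanted);
        for (Mixer.Info mixerInfo : AudioSystem.getMixerInfo()) {
            Mixer mixer = AudioSystem.getMixer(mixerInfo);
            // report whether this mixer can open a capture line in the wanted format
            System.out.printf("%-50s capture supported: %s%n",
                    mixerInfo.getName(), mixer.isLineSupported(info));
        }
    }
}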

Example 100 with AudioFormat

Use of javax.sound.sampled.AudioFormat in project competitive-programming by ttvi-cse.

The class StdAudio, method stream().

// https://www3.ntu.edu.sg/home/ehchua/programming/java/J8c_PlayingSound.html
// play a wav or aif file
// javax.sound.sampled.Clip fails for long clips (on some systems)
private static void stream(String filename) {
    SourceDataLine line = null;
    // 4K buffer
    int BUFFER_SIZE = 4096;
    try {
        InputStream is = StdAudio.class.getResourceAsStream(filename);
        AudioInputStream ais = AudioSystem.getAudioInputStream(is);
        AudioFormat audioFormat = ais.getFormat();
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(audioFormat);
        line.start();
        byte[] samples = new byte[BUFFER_SIZE];
        int count = 0;
        while ((count = ais.read(samples, 0, BUFFER_SIZE)) != -1) {
            line.write(samples, 0, count);
        }
    } catch (IOException e) {
        e.printStackTrace();
    } catch (UnsupportedAudioFileException e) {
        e.printStackTrace();
    } catch (LineUnavailableException e) {
        e.printStackTrace();
    } finally {
        if (line != null) {
            line.drain();
            line.close();
        }
    }
}
Also used: AudioInputStream (javax.sound.sampled.AudioInputStream), UnsupportedAudioFileException (javax.sound.sampled.UnsupportedAudioFileException), ByteArrayInputStream (java.io.ByteArrayInputStream), InputStream (java.io.InputStream), DataLine (javax.sound.sampled.DataLine), SourceDataLine (javax.sound.sampled.SourceDataLine), LineUnavailableException (javax.sound.sampled.LineUnavailableException), IOException (java.io.IOException), AudioFormat (javax.sound.sampled.AudioFormat)
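
One caveat with this approach: ais.getFormat() may report an encoding the output line cannot play directly (a-law/u-law, or MP3 when an SPI such as the MpegAudioFormat provider listed under Aggregations is on the classpath). Below is a rough variant that converts to signed 16-bit PCM before opening the line, using an example file name:

import java.io.File;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.SourceDataLine;

public class ConvertAndStreamSketch {
    public static void main(String[] args) throws Exception {
        AudioInputStream in = AudioSystem.getAudioInputStream(new File("clip.wav"));
        AudioFormat base = in.getFormat();
        // target: signed 16-bit PCM, little endian, same sample rate and channel count
        AudioFormat pcm = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                base.getSampleRate(), 16, base.getChannels(),
                base.getChannels() * 2, base.getSampleRate(), false);
        AudioInputStream decoded = AudioSystem.getAudioInputStream(pcm, in);

        SourceDataLine line = (SourceDataLine) AudioSystem.getLine(
                new DataLine.Info(SourceDataLine.class, pcm));
        line.open(pcm);
        line.start();
        byte[] buffer = new byte[4096];
        int count;
        while ((count = decoded.read(buffer, 0, buffer.length)) != -1) {
            line.write(buffer, 0, count);
        }
        line.drain();
        line.close();
    }
}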

Aggregations

AudioFormat (javax.sound.sampled.AudioFormat): 112
AudioInputStream (javax.sound.sampled.AudioInputStream): 43
IOException (java.io.IOException): 24
DataLine (javax.sound.sampled.DataLine): 21
SourceDataLine (javax.sound.sampled.SourceDataLine): 21
AudioFileFormat (javax.sound.sampled.AudioFileFormat): 18
UnsupportedAudioFileException (javax.sound.sampled.UnsupportedAudioFileException): 18
LineUnavailableException (javax.sound.sampled.LineUnavailableException): 17
File (java.io.File): 15
InputStream (java.io.InputStream): 14
ByteArrayInputStream (java.io.ByteArrayInputStream): 13
TargetDataLine (javax.sound.sampled.TargetDataLine): 7
MpegAudioFormat (javazoom.spi.mpeg.sampled.file.MpegAudioFormat): 7
BufferedInputStream (java.io.BufferedInputStream): 6
FileInputStream (java.io.FileInputStream): 6
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 5
DataInputStream (java.io.DataInputStream): 5
Vector (java.util.Vector): 5
SequenceInputStream (java.io.SequenceInputStream): 4
Clip (javax.sound.sampled.Clip): 4