Search in sources :

Example 1 with DFPWM

Use of pl.asie.charset.lib.audio.codec.DFPWM in the project Charset by CharsetMC.

The update method of the class TraitRecordPlayer:

// Per-tick update for the record player trait.
//
// While active (PLAYING or RECORDING) this method:
//   * lazily acquires an audio source id,
//   * looks up the tape item's IDataStorage capability in inventory slot 0,
//   * PLAYING: reads one tick's worth of DFPWM data from the tape and pushes it
//     to adjacent audio receivers, falling back to an in-world block sink,
//   * RECORDING: mixes this tick's received PCM packets into one buffer,
//     encodes it to DFPWM, and appends it to the tape.
// Afterwards it broadcasts state changes to watching clients and clears the
// per-tick received-packet buffer.
public void update(World world, BlockPos blockPos) {
    if (state != State.STOPPED && state != State.PAUSED) {
        // Lazily start an audio source the first time we become active.
        if (sourceId == null) {
            sourceId = AudioUtils.start();
        }
        boolean found = false;
        ItemStack stack = inventory.getStackInSlot(0);
        if (!stack.isEmpty() && stack.hasCapability(CharsetAudioStorage.DATA_STORAGE, null)) {
            IDataStorage storage = stack.getCapability(CharsetAudioStorage.DATA_STORAGE, null);
            if (storage != null) {
                found = true;
                if (state == State.PLAYING) {
                    int sampleRate = getSampleRate();
                    // One tick of DFPWM data: sampleRate samples/sec at 1 bit each,
                    // divided by 20 ticks/sec and 8 bits/byte.
                    byte[] data = new byte[sampleRate / (20 * 8)];
                    int len = storage.read(data, false);
                    // 50 ms per packet = one game tick at 20 TPS.
                    AudioPacket packet = new AudioPacket(new AudioDataDFPWM(data, 50).setSourceId(sourceId), 1.0F);
                    boolean received = false;
                    // Offer the packet to audio receivers on all six neighboring blocks.
                    for (EnumFacing facing : EnumFacing.VALUES) {
                        TileEntity tile = world.getTileEntity(blockPos.offset(facing));
                        if (tile != null && tile.hasCapability(Capabilities.AUDIO_RECEIVER, facing.getOpposite())) {
                            received |= tile.getCapability(Capabilities.AUDIO_RECEIVER, facing.getOpposite()).receive(packet);
                        }
                    }
                    // No neighbor accepted the audio: emit it from this block instead.
                    if (!received) {
                        new AudioSinkBlock(world, blockPos).receive(packet);
                    }
                    packet.send();
                    // Short read means the tape ran out; pause (rather than stop)
                    // so the drive state is preserved.
                    if (len < data.length) {
                        setState(State.PAUSED);
                    }
                } else if (state == State.RECORDING) {
                    // TODO: This should advance at a constant pace
                    int sampleRate = getSampleRate();
                    if (!receivedPacket.isEmpty()) {
                        // Size the mix buffer to the longest packet this tick,
                        // converting its duration (ms) to samples at the tape's rate.
                        int adLen = 0;
                        for (AudioPacket packet : receivedPacket) {
                            adLen = Math.max(adLen, packet.getData().getTime() * sampleRate / 1000);
                        }
                        boolean added = false;
                        byte[] audioData = new byte[adLen];
                        // Additively mix every audible PCM packet into audioData.
                        // NOTE(review): byte addition can wrap when overlapping loud
                        // sources sum past +-127 — confirm this clipping is acceptable.
                        for (AudioPacket packet : receivedPacket) {
                            AudioData data = packet.getData();
                            // Skip non-PCM data and effectively-silent packets.
                            if (data instanceof IAudioDataPCM && packet.getVolume() >= 0.01f) {
                                IAudioDataPCM pcm = (IAudioDataPCM) data;
                                // Scales buffer length by 50 ms relative to packet duration —
                                // presumably a guard against zero/oversized durations; the
                                // value itself is only checked for > 0. TODO confirm intent.
                                int len = audioData.length * 50 / data.getTime();
                                if (len > 0) {
                                    // Resample the packet to signed 8-bit mono at the tape's sample rate.
                                    byte[] preEncodeOutput = AudioResampler.toSigned8(pcm.getSamplePCMData(), pcm.getSampleSize() * 8, 1, pcm.isSampleBigEndian(), pcm.isSampleSigned(), pcm.getSampleRate(), sampleRate, false);
                                    if (preEncodeOutput != null) {
                                        added = true;
                                        if (packet.getVolume() >= 0.995f) {
                                            // fast path - no byte->float->byte casting
                                            for (int i = 0; i < Math.min(preEncodeOutput.length, audioData.length); i++) {
                                                audioData[i] += preEncodeOutput[i];
                                            }
                                        } else {
                                            // Volume-scaled mix for quieter packets.
                                            for (int i = 0; i < Math.min(preEncodeOutput.length, audioData.length); i++) {
                                                audioData[i] += (byte) Math.round(preEncodeOutput[i] * packet.getVolume());
                                            }
                                        }
                                    }
                                }
                            }
                        }
                        if (added) {
                            // Create the encoder lazily so its internal predictor state
                            // persists across ticks of the same recording session.
                            if (recordDFPWM == null) {
                                recordDFPWM = new DFPWM();
                            }
                            // DFPWM emits 1 bit per sample: 8 PCM bytes -> 1 output byte.
                            byte[] dataOut = new byte[audioData.length / 8];
                            recordDFPWM.compress(dataOut, audioData, 0, 0, audioData.length / 8);
                            storage.write(dataOut);
                        }
                    }
                }
            }
        }
        // No tape present, or no data-storage capability: stop the drive.
        if (!found) {
            setState(State.STOPPED);
        }
    }
    // State changed this tick: notify clients watching this tile entity.
    if (lastState != state) {
        TileEntity tileEntity = world.getTileEntity(blockPos);
        // NOTE(review): assumes the tile entity still exists and is a
        // TileRecordPlayer — a null or foreign tile here would throw.
        CharsetAudioStorage.packet.sendToWatching(new PacketDriveState((TileRecordPlayer) tileEntity, state), tileEntity);
        // Leaving PLAYING for STOPPED/PAUSED: tear down the audio source.
        if ((state == State.STOPPED || state == State.PAUSED) && lastState == State.PLAYING && sourceId != null) {
            stopAudioPlayback();
        }
    }
    lastState = state;
    // Received packets are only meaningful for a single tick.
    receivedPacket.clear();
}
Also used : AudioSinkBlock(pl.asie.charset.lib.audio.types.AudioSinkBlock) EnumFacing(net.minecraft.util.EnumFacing) IDataStorage(pl.asie.charset.api.tape.IDataStorage) TileEntity(net.minecraft.tileentity.TileEntity) AudioDataDFPWM(pl.asie.charset.lib.audio.types.AudioDataDFPWM) DFPWM(pl.asie.charset.lib.audio.codec.DFPWM) ItemStack(net.minecraft.item.ItemStack)

Aggregations

ItemStack (net.minecraft.item.ItemStack)1 TileEntity (net.minecraft.tileentity.TileEntity)1 EnumFacing (net.minecraft.util.EnumFacing)1 IDataStorage (pl.asie.charset.api.tape.IDataStorage)1 DFPWM (pl.asie.charset.lib.audio.codec.DFPWM)1 AudioDataDFPWM (pl.asie.charset.lib.audio.types.AudioDataDFPWM)1 AudioSinkBlock (pl.asie.charset.lib.audio.types.AudioSinkBlock)1